/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

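// Mask for the C2 flag (bit 10) of the x87 FPU status word, checked when looping on fprem
// while computing floating-point remainders.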
static constexpr int kC2ConditionMask = 0x400;

#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

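// Slow path throwing a NullPointerException through the pThrowNullPointer entrypoint;
// it never returns to the compiled code.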
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

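// Slow path throwing an ArithmeticException for a division by zero through pThrowDivZero;
// it never returns to the compiled code.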
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

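// Slow path for integer/long division or remainder when the divisor is -1: idiv would fault
// on the most negative dividend, so the result is computed directly (the negated dividend
// for div, zero for rem).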
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(Register reg, Primitive::Type type, bool is_div)
      : cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

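// Slow path calling the pTestSuspend entrypoint so the thread can be suspended at a safepoint,
// then resuming either at the return label or at the successor block.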
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

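// Slow path throwing an ArrayIndexOutOfBoundsException through pThrowArrayBounds, passing the
// index and length as arguments; it never returns to the compiled code.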
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  HBoundsCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

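// Slow path resolving a class through pInitializeType, or resolving and initializing it through
// pInitializeStaticStorage when a clinit check is required.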
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

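// Slow path resolving a String literal through the pResolveString entrypoint.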
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
            Immediate(instruction_->GetStringIndex()));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

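// Slow path for HInstanceOf and HCheckCast, calling pInstanceofNonTrivial or pCheckCast when
// the inline type check is not conclusive.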
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : instruction_(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  HInstruction* const instruction_;
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

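// Slow path transferring control to the interpreter through the pDeoptimize entrypoint.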
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HInstruction* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  deoptimize,
                                  deoptimize->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

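// Slow path for object array stores, delegating to the pAputObject entrypoint, which performs
// the component type check before storing the value.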
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : instruction_(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : instruction_(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  HInstruction* const instruction_;
  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : instruction_(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
#define __ down_cast<X86_64Assembler*>(GetAssembler())->

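// Maps integer condition to x86_64 name.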
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movl(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind the label at the end of the "movl" insn.
      __ Bind(&pc_relative_dex_cache_patches_.back().label);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache]
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

Location CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfCpuRegisters);
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      size_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfFloatRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location::NoLocation();
}

void CodeGeneratorX86_64::SetupBlockedRegisters(bool is_baseline) const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;

  if (is_baseline) {
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      blocked_core_registers_[kCoreCalleeSaves[i]] = true;
    }
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}

David Srbeckyc6b4dd82015-04-07 20:32:43 +01001046static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001047 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001048}
David Srbecky9d8606d2015-04-12 09:35:32 +01001049
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001050static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001051 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001052}
1053
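// Method prologue. Unless the method is a leaf with a small enough frame, a
// faulting read at RSP - GetStackOverflowReservedBytes(kX86_64) serves as an
// implicit stack overflow check. Methods with an empty frame stop there;
// otherwise the allocated callee-save core registers are pushed, RSP is
// lowered to reserve the rest of the frame, allocated XMM callee-saves are
// spilled, and the ArtMethod* (passed in kMethodRegisterArgument) is stored
// at the bottom of the frame. CFI data is emitted for every adjustment so the
// frame can be unwound.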
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001054void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001055 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001056 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001057 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001058 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001059 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001060
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001061 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001062 __ testq(CpuRegister(RAX), Address(
1063 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001064 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001065 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001066
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001067 if (HasEmptyFrame()) {
1068 return;
1069 }
1070
Nicolas Geoffray98893962015-01-21 12:32:32 +00001071 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001072 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001073 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001074 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001075 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1076 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001077 }
1078 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001079
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001080 int adjust = GetFrameSize() - GetCoreSpillSize();
1081 __ subq(CpuRegister(RSP), Immediate(adjust));
1082 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001083 uint32_t xmm_spill_location = GetFpuSpillStart();
1084 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001085
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001086 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1087 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001088 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1089 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1090 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001091 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001092 }
1093
Mathieu Chartiere401d142015-04-22 13:56:20 -07001094 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001095 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001096}
1097
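// Method epilogue, mirroring GenerateFrameEntry: XMM callee-saves are
// reloaded, RSP is restored, callee-save core registers are popped, and the
// method returns. The CFI state is remembered and restored around the `ret`
// so code emitted afterwards still describes the live frame.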
1098void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001099 __ cfi().RememberState();
1100 if (!HasEmptyFrame()) {
1101 uint32_t xmm_spill_location = GetFpuSpillStart();
1102 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1103 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1104 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1105 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1106 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1107 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1108 }
1109 }
1110
1111 int adjust = GetFrameSize() - GetCoreSpillSize();
1112 __ addq(CpuRegister(RSP), Immediate(adjust));
1113 __ cfi().AdjustCFAOffset(-adjust);
1114
1115 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1116 Register reg = kCoreCalleeSaves[i];
1117 if (allocated_registers_.ContainsCoreRegister(reg)) {
1118 __ popq(CpuRegister(reg));
1119 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1120 __ cfi().Restore(DWARFReg(reg));
1121 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001122 }
1123 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001124 __ ret();
1125 __ cfi().RestoreState();
1126 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001127}
1128
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001129void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1130 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001131}
1132
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001133Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
1134 switch (load->GetType()) {
1135 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001136 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001137 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001138
1139 case Primitive::kPrimInt:
1140 case Primitive::kPrimNot:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141 case Primitive::kPrimFloat:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001142 return Location::StackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001143
1144 case Primitive::kPrimBoolean:
1145 case Primitive::kPrimByte:
1146 case Primitive::kPrimChar:
1147 case Primitive::kPrimShort:
1148 case Primitive::kPrimVoid:
1149 LOG(FATAL) << "Unexpected type " << load->GetType();
Andreas Gampe65b798e2015-04-06 09:35:22 -07001150 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001151 }
1152
1153 LOG(FATAL) << "Unreachable";
Andreas Gampe65b798e2015-04-06 09:35:22 -07001154 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001155}
1156
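// Emits a move between two arbitrary locations (core register, XMM register,
// stack slot or constant source). TMP is used as an intermediate for
// stack-to-stack moves, since x86-64 has no memory-to-memory move instruction.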
1157void CodeGeneratorX86_64::Move(Location destination, Location source) {
1158 if (source.Equals(destination)) {
1159 return;
1160 }
1161 if (destination.IsRegister()) {
1162 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001163 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001164 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001165 __ movd(destination.AsRegister<CpuRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001166 } else if (source.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001167 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001168 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001169 } else {
1170 DCHECK(source.IsDoubleStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001171 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001172 Address(CpuRegister(RSP), source.GetStackIndex()));
1173 }
1174 } else if (destination.IsFpuRegister()) {
1175 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001176 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001177 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001178 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001179 } else if (source.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001180 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001181 Address(CpuRegister(RSP), source.GetStackIndex()));
1182 } else {
1183 DCHECK(source.IsDoubleStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001184 __ movsd(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001185 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001186 }
1187 } else if (destination.IsStackSlot()) {
1188 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001189 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001190 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001191 } else if (source.IsFpuRegister()) {
1192 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001193 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001194 } else if (source.IsConstant()) {
1195 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001196 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001197 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001198 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001199 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001200 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1201 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001202 }
1203 } else {
1204 DCHECK(destination.IsDoubleStackSlot());
1205 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001206 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001207 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001208 } else if (source.IsFpuRegister()) {
1209 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001210 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001211 } else if (source.IsConstant()) {
1212 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001213 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001214 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001215 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001216 } else {
1217 DCHECK(constant->IsLongConstant());
1218 value = constant->AsLongConstant()->GetValue();
1219 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001220 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001221 } else {
1222 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001223 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1224 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001225 }
1226 }
1227}
1228
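// Moves the value produced by `instruction` into `location`, handling the
// current method pointer, constant outputs, HLoadLocal and HTemporary
// specially; otherwise the instruction's output location is copied.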
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001229void CodeGeneratorX86_64::Move(HInstruction* instruction,
1230 Location location,
1231 HInstruction* move_for) {
Calin Juravlea21f5982014-11-13 15:53:04 +00001232 LocationSummary* locations = instruction->GetLocations();
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001233 if (instruction->IsCurrentMethod()) {
Mathieu Chartiere3b034a2015-05-31 14:29:23 -07001234 Move(location, Location::DoubleStackSlot(kCurrentMethodStackOffset));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001235 } else if (locations != nullptr && locations->Out().Equals(location)) {
Calin Juravlea21f5982014-11-13 15:53:04 +00001236 return;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001237 } else if (locations != nullptr && locations->Out().IsConstant()) {
Calin Juravlea21f5982014-11-13 15:53:04 +00001238 HConstant* const_to_move = locations->Out().GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001239 if (const_to_move->IsIntConstant() || const_to_move->IsNullConstant()) {
1240 Immediate imm(GetInt32ValueOf(const_to_move));
Calin Juravlea21f5982014-11-13 15:53:04 +00001241 if (location.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001242 __ movl(location.AsRegister<CpuRegister>(), imm);
Calin Juravlea21f5982014-11-13 15:53:04 +00001243 } else if (location.IsStackSlot()) {
1244 __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
1245 } else {
1246 DCHECK(location.IsConstant());
1247 DCHECK_EQ(location.GetConstant(), const_to_move);
1248 }
1249 } else if (const_to_move->IsLongConstant()) {
1250 int64_t value = const_to_move->AsLongConstant()->GetValue();
1251 if (location.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04001252 Load64BitValue(location.AsRegister<CpuRegister>(), value);
Calin Juravlea21f5982014-11-13 15:53:04 +00001253 } else if (location.IsDoubleStackSlot()) {
Mark Mendellcfa410b2015-05-25 16:02:44 -04001254 Store64BitValueToStack(location, value);
Calin Juravlea21f5982014-11-13 15:53:04 +00001255 } else {
1256 DCHECK(location.IsConstant());
1257 DCHECK_EQ(location.GetConstant(), const_to_move);
1258 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001259 }
Roland Levillain476df552014-10-09 17:51:36 +01001260 } else if (instruction->IsLoadLocal()) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001261 switch (instruction->GetType()) {
1262 case Primitive::kPrimBoolean:
1263 case Primitive::kPrimByte:
1264 case Primitive::kPrimChar:
1265 case Primitive::kPrimShort:
1266 case Primitive::kPrimInt:
1267 case Primitive::kPrimNot:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001268 case Primitive::kPrimFloat:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001269 Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
1270 break;
1271
1272 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001273 case Primitive::kPrimDouble:
Roland Levillain199f3362014-11-27 17:15:16 +00001274 Move(location,
1275 Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001276 break;
1277
1278 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001279 LOG(FATAL) << "Unexpected local type " << instruction->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001280 }
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +00001281 } else if (instruction->IsTemporary()) {
1282 Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
1283 Move(location, temp_location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001284 } else {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001285 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001286 switch (instruction->GetType()) {
1287 case Primitive::kPrimBoolean:
1288 case Primitive::kPrimByte:
1289 case Primitive::kPrimChar:
1290 case Primitive::kPrimShort:
1291 case Primitive::kPrimInt:
1292 case Primitive::kPrimNot:
1293 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001294 case Primitive::kPrimFloat:
1295 case Primitive::kPrimDouble:
Calin Juravlea21f5982014-11-13 15:53:04 +00001296 Move(location, locations->Out());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001297 break;
1298
1299 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001300 LOG(FATAL) << "Unexpected type " << instruction->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001301 }
1302 }
1303}
1304
Calin Juravle175dc732015-08-25 15:42:32 +01001305void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1306 DCHECK(location.IsRegister());
1307 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1308}
1309
Calin Juravlee460d1d2015-09-29 04:52:17 +01001310void CodeGeneratorX86_64::MoveLocation(
1311 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1312 Move(dst, src);
1313}
1314
1315void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1316 if (location.IsRegister()) {
1317 locations->AddTemp(location);
1318 } else {
1319 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1320 }
1321}
1322
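// Shared code for HGoto and HTryBoundary: emits a suspend check when taking a
// loop back edge or when leaving the entry block, then jumps to the successor
// unless it is the next block in the code generation order.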
David Brazdilfc6a86a2015-06-26 10:33:45 +00001323void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001324 DCHECK(!successor->IsExitBlock());
1325
1326 HBasicBlock* block = got->GetBlock();
1327 HInstruction* previous = got->GetPrevious();
1328
1329 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001330 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001331 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1332 return;
1333 }
1334
1335 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1336 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1337 }
1338 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001339 __ jmp(codegen_->GetLabelOf(successor));
1340 }
1341}
1342
David Brazdilfc6a86a2015-06-26 10:33:45 +00001343void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1344 got->SetLocations(nullptr);
1345}
1346
1347void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1348 HandleGoto(got, got->GetSuccessor());
1349}
1350
1351void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1352 try_boundary->SetLocations(nullptr);
1353}
1354
1355void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1356 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1357 if (!successor->IsExitBlock()) {
1358 HandleGoto(try_boundary, successor);
1359 }
1360}
1361
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001362void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1363 exit->SetLocations(nullptr);
1364}
1365
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001366void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001367}
1368
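// Emits the conditional jumps for a floating-point comparison whose flags were
// set by ucomiss/ucomisd. The unordered (NaN) case is routed to the true or
// false target first, according to the condition's bias, before the ordinary
// condition jump.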
Mark Mendellc4701932015-04-10 13:18:51 -04001369void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
1370 Label* true_label,
1371 Label* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001372 if (cond->IsFPConditionTrueIfNaN()) {
1373 __ j(kUnordered, true_label);
1374 } else if (cond->IsFPConditionFalseIfNaN()) {
1375 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001376 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001377 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001378}
1379
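// Emits the compare and the conditional branches for long and floating-point
// conditions that were folded into an HCondition. A missing (fallthrough)
// target is replaced by a local label bound at the end of the generated code.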
David Brazdil0debae72015-11-12 18:37:00 +00001380void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1381 Label* true_target_in,
1382 Label* false_target_in) {
1383 // Generated branching requires both targets to be explicit. If either of the
1384 // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1385 Label fallthrough_target;
1386 Label* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1387 Label* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1388
Mark Mendellc4701932015-04-10 13:18:51 -04001389 LocationSummary* locations = condition->GetLocations();
1390 Location left = locations->InAt(0);
1391 Location right = locations->InAt(1);
1392
Mark Mendellc4701932015-04-10 13:18:51 -04001393 Primitive::Type type = condition->InputAt(0)->GetType();
1394 switch (type) {
1395 case Primitive::kPrimLong: {
1396 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1397 if (right.IsConstant()) {
1398 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
1399 if (IsInt<32>(value)) {
1400 if (value == 0) {
1401 __ testq(left_reg, left_reg);
1402 } else {
1403 __ cmpq(left_reg, Immediate(static_cast<int32_t>(value)));
1404 }
1405 } else {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001406 // Value won't fit in a 32-bit integer.
Mark Mendellc4701932015-04-10 13:18:51 -04001407 __ cmpq(left_reg, codegen_->LiteralInt64Address(value));
1408 }
1409 } else if (right.IsDoubleStackSlot()) {
1410 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1411 } else {
1412 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1413 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001414 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Mark Mendellc4701932015-04-10 13:18:51 -04001415 break;
1416 }
1417 case Primitive::kPrimFloat: {
1418 if (right.IsFpuRegister()) {
1419 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1420 } else if (right.IsConstant()) {
1421 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1422 codegen_->LiteralFloatAddress(
1423 right.GetConstant()->AsFloatConstant()->GetValue()));
1424 } else {
1425 DCHECK(right.IsStackSlot());
1426 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1427 Address(CpuRegister(RSP), right.GetStackIndex()));
1428 }
1429 GenerateFPJumps(condition, true_target, false_target);
1430 break;
1431 }
1432 case Primitive::kPrimDouble: {
1433 if (right.IsFpuRegister()) {
1434 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1435 } else if (right.IsConstant()) {
1436 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1437 codegen_->LiteralDoubleAddress(
1438 right.GetConstant()->AsDoubleConstant()->GetValue()));
1439 } else {
1440 DCHECK(right.IsDoubleStackSlot());
1441 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1442 Address(CpuRegister(RSP), right.GetStackIndex()));
1443 }
1444 GenerateFPJumps(condition, true_target, false_target);
1445 break;
1446 }
1447 default:
1448 LOG(FATAL) << "Unexpected condition type " << type;
1449 }
1450
David Brazdil0debae72015-11-12 18:37:00 +00001451 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001452 __ jmp(false_target);
1453 }
David Brazdil0debae72015-11-12 18:37:00 +00001454
1455 if (fallthrough_target.IsLinked()) {
1456 __ Bind(&fallthrough_target);
1457 }
Mark Mendellc4701932015-04-10 13:18:51 -04001458}
1459
David Brazdil0debae72015-11-12 18:37:00 +00001460static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1461 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1462 // are set only strictly before `branch`. We can't use the eflags on long
1463 // conditions if they are materialized due to the complex branching.
1464 return cond->IsCondition() &&
1465 cond->GetNext() == branch &&
1466 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1467}
1468
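// Common branch generation for HIf and HDeoptimize. Constant conditions
// collapse into an unconditional jump (or into nothing); boolean inputs and
// materialized conditions either reuse the flags set just before the branch
// or are re-tested against zero; non-materialized integer conditions are
// compared inline, while long and floating-point ones are handled by
// GenerateCompareTestAndBranch.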
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001469void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001470 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001471 Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00001472 Label* false_target) {
1473 HInstruction* cond = instruction->InputAt(condition_input_index);
1474
1475 if (true_target == nullptr && false_target == nullptr) {
1476 // Nothing to do. The code always falls through.
1477 return;
1478 } else if (cond->IsIntConstant()) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001479 // Constant condition, statically compared against 1.
David Brazdil0debae72015-11-12 18:37:00 +00001480 if (cond->AsIntConstant()->IsOne()) {
1481 if (true_target != nullptr) {
1482 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001483 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001484 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001485 DCHECK(cond->AsIntConstant()->IsZero());
1486 if (false_target != nullptr) {
1487 __ jmp(false_target);
1488 }
1489 }
1490 return;
1491 }
1492
1493 // The following code generates these patterns:
1494 // (1) true_target == nullptr && false_target != nullptr
1495 // - opposite condition true => branch to false_target
1496 // (2) true_target != nullptr && false_target == nullptr
1497 // - condition true => branch to true_target
1498 // (3) true_target != nullptr && false_target != nullptr
1499 // - condition true => branch to true_target
1500 // - branch to false_target
1501 if (IsBooleanValueOrMaterializedCondition(cond)) {
1502 if (AreEflagsSetFrom(cond, instruction)) {
1503 if (true_target == nullptr) {
1504 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1505 } else {
1506 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1507 }
1508 } else {
1509 // Materialized condition, compare against 0.
1510 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1511 if (lhs.IsRegister()) {
1512 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1513 } else {
1514 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1515 }
1516 if (true_target == nullptr) {
1517 __ j(kEqual, false_target);
1518 } else {
1519 __ j(kNotEqual, true_target);
1520 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001521 }
1522 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001523 // Condition has not been materialized, use its inputs as the
1524 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001525 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001526
David Brazdil0debae72015-11-12 18:37:00 +00001527 // If this is a long or FP comparison that has been folded into
1528 // the HCondition, generate the comparison directly.
1529 Primitive::Type type = condition->InputAt(0)->GetType();
1530 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1531 GenerateCompareTestAndBranch(condition, true_target, false_target);
1532 return;
1533 }
1534
1535 Location lhs = condition->GetLocations()->InAt(0);
1536 Location rhs = condition->GetLocations()->InAt(1);
1537 if (rhs.IsRegister()) {
1538 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1539 } else if (rhs.IsConstant()) {
1540 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
1541 if (constant == 0) {
1542 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001543 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001544 __ cmpl(lhs.AsRegister<CpuRegister>(), Immediate(constant));
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001545 }
1546 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001547 __ cmpl(lhs.AsRegister<CpuRegister>(),
1548 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1549 }
1550 if (true_target == nullptr) {
1551 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1552 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001553 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001554 }
Dave Allison20dfc792014-06-16 20:44:29 -07001555 }
David Brazdil0debae72015-11-12 18:37:00 +00001556
1557 // If neither branch falls through (case 3), the conditional branch to `true_target`
1558 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1559 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001560 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001561 }
1562}
1563
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001564void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001565 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1566 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001567 locations->SetInAt(0, Location::Any());
1568 }
1569}
1570
1571void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001572 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1573 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1574 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1575 nullptr : codegen_->GetLabelOf(true_successor);
1576 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1577 nullptr : codegen_->GetLabelOf(false_successor);
1578 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001579}
1580
1581void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1582 LocationSummary* locations = new (GetGraph()->GetArena())
1583 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001584 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001585 locations->SetInAt(0, Location::Any());
1586 }
1587}
1588
1589void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07001590 SlowPathCode* slow_path = new (GetGraph()->GetArena())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001591 DeoptimizationSlowPathX86_64(deoptimize);
1592 codegen_->AddSlowPath(slow_path);
David Brazdil0debae72015-11-12 18:37:00 +00001593 GenerateTestAndBranch(deoptimize,
1594 /* condition_input_index */ 0,
1595 slow_path->GetEntryLabel(),
1596 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001597}
1598
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001599void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
1600 local->SetLocations(nullptr);
1601}
1602
1603void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
1604 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1605}
1606
1607void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
1608 local->SetLocations(nullptr);
1609}
1610
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001611void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001612 // Nothing to do, this is driven by the code generator.
1613}
1614
1615void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001616 LocationSummary* locations =
1617 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001618 switch (store->InputAt(1)->GetType()) {
1619 case Primitive::kPrimBoolean:
1620 case Primitive::kPrimByte:
1621 case Primitive::kPrimChar:
1622 case Primitive::kPrimShort:
1623 case Primitive::kPrimInt:
1624 case Primitive::kPrimNot:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001625 case Primitive::kPrimFloat:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001626 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1627 break;
1628
1629 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001630 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001631 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1632 break;
1633
1634 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001635 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001636 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001637}
1638
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001639void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001640}
1641
Roland Levillain0d37cd02015-05-27 16:39:19 +01001642void LocationsBuilderX86_64::VisitCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001643 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001644 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001645 // Handle the long/FP comparisons made in instruction simplification.
1646 switch (cond->InputAt(0)->GetType()) {
1647 case Primitive::kPrimLong:
1648 locations->SetInAt(0, Location::RequiresRegister());
1649 locations->SetInAt(1, Location::Any());
1650 break;
1651 case Primitive::kPrimFloat:
1652 case Primitive::kPrimDouble:
1653 locations->SetInAt(0, Location::RequiresFpuRegister());
1654 locations->SetInAt(1, Location::Any());
1655 break;
1656 default:
1657 locations->SetInAt(0, Location::RequiresRegister());
1658 locations->SetInAt(1, Location::Any());
1659 break;
1660 }
Roland Levillain0d37cd02015-05-27 16:39:19 +01001661 if (cond->NeedsMaterialization()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001662 locations->SetOut(Location::RequiresRegister());
1663 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001664}
1665
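// Materializes a condition into its output register. Integer and long
// comparisons clear the register and then use setcc (which only writes the
// low byte); float and double comparisons branch through GenerateFPJumps and
// then convert the taken branch into 0 or 1.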
Roland Levillain0d37cd02015-05-27 16:39:19 +01001666void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* cond) {
Mark Mendellc4701932015-04-10 13:18:51 -04001667 if (!cond->NeedsMaterialization()) {
1668 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001669 }
Mark Mendellc4701932015-04-10 13:18:51 -04001670
1671 LocationSummary* locations = cond->GetLocations();
1672 Location lhs = locations->InAt(0);
1673 Location rhs = locations->InAt(1);
1674 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
1675 Label true_label, false_label;
1676
1677 switch (cond->InputAt(0)->GetType()) {
1678 default:
1679 // Integer case.
1680
1681 // Clear output register: setcc only sets the low byte.
1682 __ xorl(reg, reg);
1683
1684 if (rhs.IsRegister()) {
1685 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1686 } else if (rhs.IsConstant()) {
1687 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
1688 if (constant == 0) {
1689 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1690 } else {
1691 __ cmpl(lhs.AsRegister<CpuRegister>(), Immediate(constant));
1692 }
1693 } else {
1694 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1695 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001696 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001697 return;
1698 case Primitive::kPrimLong:
1699 // Clear output register: setcc only sets the low byte.
1700 __ xorl(reg, reg);
1701
1702 if (rhs.IsRegister()) {
1703 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1704 } else if (rhs.IsConstant()) {
1705 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
1706 if (IsInt<32>(value)) {
1707 if (value == 0) {
1708 __ testq(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1709 } else {
1710 __ cmpq(lhs.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
1711 }
1712 } else {
1713 // Value won't fit in a 32-bit integer.
1714 __ cmpq(lhs.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
1715 }
1716 } else {
1717 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1718 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001719 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001720 return;
1721 case Primitive::kPrimFloat: {
1722 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1723 if (rhs.IsConstant()) {
1724 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1725 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1726 } else if (rhs.IsStackSlot()) {
1727 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1728 } else {
1729 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1730 }
1731 GenerateFPJumps(cond, &true_label, &false_label);
1732 break;
1733 }
1734 case Primitive::kPrimDouble: {
1735 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1736 if (rhs.IsConstant()) {
1737 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1738 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1739 } else if (rhs.IsDoubleStackSlot()) {
1740 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1741 } else {
1742 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1743 }
1744 GenerateFPJumps(cond, &true_label, &false_label);
1745 break;
1746 }
1747 }
1748
1749 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001750 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001751
Roland Levillain4fa13f62015-07-06 18:11:54 +01001752 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001753 __ Bind(&false_label);
1754 __ xorl(reg, reg);
1755 __ jmp(&done_label);
1756
Roland Levillain4fa13f62015-07-06 18:11:54 +01001757 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001758 __ Bind(&true_label);
1759 __ movl(reg, Immediate(1));
1760 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001761}
1762
1763void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
1764 VisitCondition(comp);
1765}
1766
1767void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
1768 VisitCondition(comp);
1769}
1770
1771void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
1772 VisitCondition(comp);
1773}
1774
1775void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
1776 VisitCondition(comp);
1777}
1778
1779void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
1780 VisitCondition(comp);
1781}
1782
1783void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
1784 VisitCondition(comp);
1785}
1786
1787void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1788 VisitCondition(comp);
1789}
1790
1791void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
1792 VisitCondition(comp);
1793}
1794
1795void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
1796 VisitCondition(comp);
1797}
1798
1799void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
1800 VisitCondition(comp);
1801}
1802
1803void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1804 VisitCondition(comp);
1805}
1806
1807void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1808 VisitCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001809}
1810
Aart Bike9f37602015-10-09 11:15:55 -07001811void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
1812 VisitCondition(comp);
1813}
1814
1815void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
1816 VisitCondition(comp);
1817}
1818
1819void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
1820 VisitCondition(comp);
1821}
1822
1823void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
1824 VisitCondition(comp);
1825}
1826
1827void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
1828 VisitCondition(comp);
1829}
1830
1831void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
1832 VisitCondition(comp);
1833}
1834
1835void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
1836 VisitCondition(comp);
1837}
1838
1839void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
1840 VisitCondition(comp);
1841}
1842
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001843void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001844 LocationSummary* locations =
1845 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001846 switch (compare->InputAt(0)->GetType()) {
1847 case Primitive::kPrimLong: {
1848 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001849 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001850 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1851 break;
1852 }
1853 case Primitive::kPrimFloat:
1854 case Primitive::kPrimDouble: {
1855 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001856 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001857 locations->SetOut(Location::RequiresRegister());
1858 break;
1859 }
1860 default:
1861 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1862 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001863}
1864
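// HCompare produces -1, 0 or 1. Long inputs set the flags with cmpq;
// float/double inputs use ucomis{s,d}, with NaN steered towards 1 or -1
// according to the compare's gt/lt bias. The result is then materialized from
// the flags with movl and conditional jumps.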
1865void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001866 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001867 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001868 Location left = locations->InAt(0);
1869 Location right = locations->InAt(1);
1870
Mark Mendell0c9497d2015-08-21 09:30:05 -04001871 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001872 Primitive::Type type = compare->InputAt(0)->GetType();
1873 switch (type) {
1874 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001875 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1876 if (right.IsConstant()) {
1877 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell40741f32015-04-20 22:10:34 -04001878 if (IsInt<32>(value)) {
1879 if (value == 0) {
1880 __ testq(left_reg, left_reg);
1881 } else {
1882 __ cmpq(left_reg, Immediate(static_cast<int32_t>(value)));
1883 }
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001884 } else {
Mark Mendell40741f32015-04-20 22:10:34 -04001885 // Value won't fit in a 32-bit integer.
1886 __ cmpq(left_reg, codegen_->LiteralInt64Address(value));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001887 }
Mark Mendell40741f32015-04-20 22:10:34 -04001888 } else if (right.IsDoubleStackSlot()) {
1889 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001890 } else {
1891 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1892 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001893 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001894 }
1895 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001896 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1897 if (right.IsConstant()) {
1898 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1899 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1900 } else if (right.IsStackSlot()) {
1901 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1902 } else {
1903 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1904 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001905 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
1906 break;
1907 }
1908 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001909 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1910 if (right.IsConstant()) {
1911 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1912 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1913 } else if (right.IsDoubleStackSlot()) {
1914 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1915 } else {
1916 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1917 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001918 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
1919 break;
1920 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001921 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001922 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001923 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001924 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001925 __ j(kEqual, &done);
Calin Juravleddb7df22014-11-25 20:56:51 +00001926 __ j(type == Primitive::kPrimLong ? kLess : kBelow, &less); // ucomis{s,d} sets CF (kBelow)
Calin Juravlefd861242014-11-25 20:56:51 +00001927
Calin Juravle91debbc2014-11-26 19:01:09 +00001928 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001929 __ movl(out, Immediate(1));
1930 __ jmp(&done);
1931
1932 __ Bind(&less);
1933 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001934
1935 __ Bind(&done);
1936}
1937
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001938void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001939 LocationSummary* locations =
1940 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001941 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001942}
1943
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001944void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001945 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001946}
1947
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001948void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1949 LocationSummary* locations =
1950 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1951 locations->SetOut(Location::ConstantLocation(constant));
1952}
1953
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001954void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001955 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001956}
1957
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001958void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001959 LocationSummary* locations =
1960 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001961 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001962}
1963
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001964void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001965 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001966}
1967
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001968void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1969 LocationSummary* locations =
1970 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1971 locations->SetOut(Location::ConstantLocation(constant));
1972}
1973
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001974void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001975 // Will be generated at use site.
1976}
1977
1978void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1979 LocationSummary* locations =
1980 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1981 locations->SetOut(Location::ConstantLocation(constant));
1982}
1983
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001984void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
1985 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001986 // Will be generated at use site.
1987}
1988
Calin Juravle27df7582015-04-17 19:12:31 +01001989void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
1990 memory_barrier->SetLocations(nullptr);
1991}
1992
1993void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001994 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01001995}
1996
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001997void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
1998 ret->SetLocations(nullptr);
1999}
2000
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002001void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002002 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002003}
2004
2005void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002006 LocationSummary* locations =
2007 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002008 switch (ret->InputAt(0)->GetType()) {
2009 case Primitive::kPrimBoolean:
2010 case Primitive::kPrimByte:
2011 case Primitive::kPrimChar:
2012 case Primitive::kPrimShort:
2013 case Primitive::kPrimInt:
2014 case Primitive::kPrimNot:
2015 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002016 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002017 break;
2018
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002019 case Primitive::kPrimFloat:
2020 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002021 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002022 break;
2023
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002024 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002025 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002026 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002027}
2028
2029void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2030 if (kIsDebugBuild) {
2031 switch (ret->InputAt(0)->GetType()) {
2032 case Primitive::kPrimBoolean:
2033 case Primitive::kPrimByte:
2034 case Primitive::kPrimChar:
2035 case Primitive::kPrimShort:
2036 case Primitive::kPrimInt:
2037 case Primitive::kPrimNot:
2038 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002039 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002040 break;
2041
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002042 case Primitive::kPrimFloat:
2043 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002044 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002045 XMM0);
2046 break;
2047
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002048 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002049 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002050 }
2051 }
2052 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002053}
2054
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002055Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2056 switch (type) {
2057 case Primitive::kPrimBoolean:
2058 case Primitive::kPrimByte:
2059 case Primitive::kPrimChar:
2060 case Primitive::kPrimShort:
2061 case Primitive::kPrimInt:
2062 case Primitive::kPrimNot:
2063 case Primitive::kPrimLong:
2064 return Location::RegisterLocation(RAX);
2065
2066 case Primitive::kPrimVoid:
2067 return Location::NoLocation();
2068
2069 case Primitive::kPrimDouble:
2070 case Primitive::kPrimFloat:
2071 return Location::FpuRegisterLocation(XMM0);
2072 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002073
2074 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002075}
2076
2077Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2078 return Location::RegisterLocation(kMethodRegisterArgument);
2079}
2080
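// Maps the next parameter of the given type to its location under the x86-64
// managed calling convention: integral and reference arguments use the
// general-purpose argument registers, float and double use the XMM argument
// registers, and anything beyond the available registers goes to a stack slot.
// gp_index_, float_index_ and stack_index_ track what has been handed out.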
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002081Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002082 switch (type) {
2083 case Primitive::kPrimBoolean:
2084 case Primitive::kPrimByte:
2085 case Primitive::kPrimChar:
2086 case Primitive::kPrimShort:
2087 case Primitive::kPrimInt:
2088 case Primitive::kPrimNot: {
2089 uint32_t index = gp_index_++;
2090 stack_index_++;
2091 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002092 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002093 } else {
2094 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2095 }
2096 }
2097
2098 case Primitive::kPrimLong: {
2099 uint32_t index = gp_index_;
2100 stack_index_ += 2;
2101 if (index < calling_convention.GetNumberOfRegisters()) {
2102 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002103 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002104 } else {
2105 gp_index_ += 2;
2106 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2107 }
2108 }
2109
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002110 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002111 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002112 stack_index_++;
2113 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002114 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002115 } else {
2116 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2117 }
2118 }
2119
2120 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002121 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002122 stack_index_ += 2;
2123 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002124 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002125 } else {
2126 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2127 }
2128 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002129
2130 case Primitive::kPrimVoid:
2131 LOG(FATAL) << "Unexpected parameter type " << type;
2132 break;
2133 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002134 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002135}
2136
Calin Juravle175dc732015-08-25 15:42:32 +01002137void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2138 // The trampoline uses the same calling convention as a regular dex call,
2139 // except that instead of loading arg0/r0 with the target Method*, arg0/r0
2140 // will contain the method_idx.
2141 HandleInvoke(invoke);
2142}
2143
2144void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2145 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2146}
2147
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002148void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002149 // When we do not run baseline, explicit clinit checks triggered by static
2150 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2151 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002152
Mark Mendellfb8d2792015-03-31 22:16:59 -04002153 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002154 if (intrinsic.TryDispatch(invoke)) {
2155 return;
2156 }
2157
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002158 HandleInvoke(invoke);
2159}
2160
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002161static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2162 if (invoke->GetLocations()->Intrinsified()) {
2163 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2164 intrinsic.Dispatch(invoke);
2165 return true;
2166 }
2167 return false;
2168}
2169
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002170void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002171 // When we do not run baseline, explicit clinit checks triggered by static
2172 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2173 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002174
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002175 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2176 return;
2177 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002178
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002179 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002180 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002181 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002182 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002183}
2184
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002185void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002186 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002187 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002188}
2189
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002190void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002191 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002192 if (intrinsic.TryDispatch(invoke)) {
2193 return;
2194 }
2195
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002196 HandleInvoke(invoke);
2197}
2198
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002199void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002200 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2201 return;
2202 }
2203
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002204 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002205 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002206 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002207}
2208
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002209void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2210 HandleInvoke(invoke);
2211 // Add the hidden argument.
2212 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2213}
2214
2215void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2216 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
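  // (The IMT is a fixed-size table embedded in the class, indexed below by
  // GetImtIndex() % kImtSize. The dispatch never checks that the receiver's
  // class actually implements the interface, so a receiver of an unrelated
  // type can still resolve to some method through the table instead of
  // raising the error -- hence the TODO above.)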
Roland Levillain0d5a2812015-11-13 10:07:31 +00002217 LocationSummary* locations = invoke->GetLocations();
2218 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2219 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002220 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2221 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002222 Location receiver = locations->InAt(0);
2223 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2224
Roland Levillain0d5a2812015-11-13 10:07:31 +00002225 // Set the hidden argument. It is safe to do this here, as RAX
2226 // won't be modified thereafter, before the `call` instruction.
2227 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002228 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002229
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002230 if (receiver.IsStackSlot()) {
2231 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002232 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002233 __ movl(temp, Address(temp, class_offset));
2234 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002235 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002236 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002237 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002238 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002239 // Instead of simply (possibly) unpoisoning `temp` here, we should
2240 // emit a read barrier for the previous class reference load.
2241 // However this is not required in practice, as this is an
2242 // intermediate/temporary reference, and because the current
2243 // concurrent copying collector keeps the from-space memory
2244 // intact/accessible until the end of the marking phase (though
2245 // future concurrent copying collectors may not).
Roland Levillain4d027112015-07-01 15:41:14 +01002246 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002247 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002248 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002249 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002250 __ call(Address(temp,
2251 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002252
2253 DCHECK(!codegen_->IsLeafMethod());
2254 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2255}
2256
Roland Levillain88cb1752014-10-20 16:36:47 +01002257void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2258 LocationSummary* locations =
2259 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2260 switch (neg->GetResultType()) {
2261 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002262 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002263 locations->SetInAt(0, Location::RequiresRegister());
2264 locations->SetOut(Location::SameAsFirstInput());
2265 break;
2266
Roland Levillain88cb1752014-10-20 16:36:47 +01002267 case Primitive::kPrimFloat:
2268 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002269 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002270 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002271 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002272 break;
2273
2274 default:
2275 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2276 }
2277}
2278
2279void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2280 LocationSummary* locations = neg->GetLocations();
2281 Location out = locations->Out();
2282 Location in = locations->InAt(0);
2283 switch (neg->GetResultType()) {
2284 case Primitive::kPrimInt:
2285 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002286 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002287 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002288 break;
2289
2290 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002291 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002292 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002293 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002294 break;
2295
Roland Levillain5368c212014-11-27 15:03:41 +00002296 case Primitive::kPrimFloat: {
2297 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002298 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002299 // Implement float negation with an exclusive or with value
2300 // 0x80000000 (mask for bit 31, representing the sign of a
2301 // single-precision floating-point number).
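      // For example (sketch): bit_cast<int32_t>(1.5f) is 0x3FC00000, and
      // 0x3FC00000 ^ 0x80000000 is 0xBFC00000, i.e. -1.5f. Unlike computing
      // 0.0f - x, the xor also turns +0.0f into -0.0f and leaves NaN
      // payloads untouched, which matches Java's negation semantics.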
Mark Mendell40741f32015-04-20 22:10:34 -04002302 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002303 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002304 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002305 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002306
Roland Levillain5368c212014-11-27 15:03:41 +00002307 case Primitive::kPrimDouble: {
2308 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002309 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002310 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002311 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002312 // a double-precision floating-point number).
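      // For example (sketch): bit_cast<int64_t>(2.0) is 0x4000000000000000,
      // and xoring it with 0x8000000000000000 gives 0xC000000000000000,
      // i.e. -2.0.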
Mark Mendell40741f32015-04-20 22:10:34 -04002313 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002314 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002315 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002316 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002317
2318 default:
2319 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2320 }
2321}
2322
Roland Levillaindff1f282014-11-05 14:15:05 +00002323void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2324 LocationSummary* locations =
2325 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2326 Primitive::Type result_type = conversion->GetResultType();
2327 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002328 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002329
David Brazdilb2bd1c52015-03-25 11:17:37 +00002330 // The Java language does not allow treating boolean as an integral type but
2331 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002332
Roland Levillaindff1f282014-11-05 14:15:05 +00002333 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002334 case Primitive::kPrimByte:
2335 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002336 case Primitive::kPrimBoolean:
2337 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002338 case Primitive::kPrimShort:
2339 case Primitive::kPrimInt:
2340 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002341 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002342 locations->SetInAt(0, Location::Any());
2343 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2344 break;
2345
2346 default:
2347 LOG(FATAL) << "Unexpected type conversion from " << input_type
2348 << " to " << result_type;
2349 }
2350 break;
2351
Roland Levillain01a8d712014-11-14 16:27:39 +00002352 case Primitive::kPrimShort:
2353 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002354 case Primitive::kPrimBoolean:
2355 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002356 case Primitive::kPrimByte:
2357 case Primitive::kPrimInt:
2358 case Primitive::kPrimChar:
2359 // Processing a Dex `int-to-short' instruction.
2360 locations->SetInAt(0, Location::Any());
2361 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2362 break;
2363
2364 default:
2365 LOG(FATAL) << "Unexpected type conversion from " << input_type
2366 << " to " << result_type;
2367 }
2368 break;
2369
Roland Levillain946e1432014-11-11 17:35:19 +00002370 case Primitive::kPrimInt:
2371 switch (input_type) {
2372 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002373 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002374 locations->SetInAt(0, Location::Any());
2375 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2376 break;
2377
2378 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002379 // Processing a Dex `float-to-int' instruction.
2380 locations->SetInAt(0, Location::RequiresFpuRegister());
2381 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002382 break;
2383
Roland Levillain946e1432014-11-11 17:35:19 +00002384 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002385 // Processing a Dex `double-to-int' instruction.
2386 locations->SetInAt(0, Location::RequiresFpuRegister());
2387 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002388 break;
2389
2390 default:
2391 LOG(FATAL) << "Unexpected type conversion from " << input_type
2392 << " to " << result_type;
2393 }
2394 break;
2395
Roland Levillaindff1f282014-11-05 14:15:05 +00002396 case Primitive::kPrimLong:
2397 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002398 case Primitive::kPrimBoolean:
2399 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002400 case Primitive::kPrimByte:
2401 case Primitive::kPrimShort:
2402 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002403 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002404 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002405 // TODO: We would benefit from a (to-be-implemented)
2406 // Location::RegisterOrStackSlot requirement for this input.
2407 locations->SetInAt(0, Location::RequiresRegister());
2408 locations->SetOut(Location::RequiresRegister());
2409 break;
2410
2411 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002412 // Processing a Dex `float-to-long' instruction.
2413 locations->SetInAt(0, Location::RequiresFpuRegister());
2414 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002415 break;
2416
Roland Levillaindff1f282014-11-05 14:15:05 +00002417 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002418 // Processing a Dex `double-to-long' instruction.
2419 locations->SetInAt(0, Location::RequiresFpuRegister());
2420 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002421 break;
2422
2423 default:
2424 LOG(FATAL) << "Unexpected type conversion from " << input_type
2425 << " to " << result_type;
2426 }
2427 break;
2428
Roland Levillain981e4542014-11-14 11:47:14 +00002429 case Primitive::kPrimChar:
2430 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002431 case Primitive::kPrimBoolean:
2432 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002433 case Primitive::kPrimByte:
2434 case Primitive::kPrimShort:
2435 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002436 // Processing a Dex `int-to-char' instruction.
2437 locations->SetInAt(0, Location::Any());
2438 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2439 break;
2440
2441 default:
2442 LOG(FATAL) << "Unexpected type conversion from " << input_type
2443 << " to " << result_type;
2444 }
2445 break;
2446
Roland Levillaindff1f282014-11-05 14:15:05 +00002447 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002448 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002449 case Primitive::kPrimBoolean:
2450 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002451 case Primitive::kPrimByte:
2452 case Primitive::kPrimShort:
2453 case Primitive::kPrimInt:
2454 case Primitive::kPrimChar:
2455 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002456 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002457 locations->SetOut(Location::RequiresFpuRegister());
2458 break;
2459
2460 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002461 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002462 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002463 locations->SetOut(Location::RequiresFpuRegister());
2464 break;
2465
Roland Levillaincff13742014-11-17 14:32:17 +00002466 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002467 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002468 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002469 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002470 break;
2471
2472 default:
2473 LOG(FATAL) << "Unexpected type conversion from " << input_type
2474 << " to " << result_type;
2475 }
2476 break;
2477
Roland Levillaindff1f282014-11-05 14:15:05 +00002478 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002479 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002480 case Primitive::kPrimBoolean:
2481 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002482 case Primitive::kPrimByte:
2483 case Primitive::kPrimShort:
2484 case Primitive::kPrimInt:
2485 case Primitive::kPrimChar:
2486 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002487 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002488 locations->SetOut(Location::RequiresFpuRegister());
2489 break;
2490
2491 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002492 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002493 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002494 locations->SetOut(Location::RequiresFpuRegister());
2495 break;
2496
Roland Levillaincff13742014-11-17 14:32:17 +00002497 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002498 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002499 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002500 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002501 break;
2502
2503 default:
2504 LOG(FATAL) << "Unexpected type conversion from " << input_type
2505 << " to " << result_type;
2506 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002507 break;
2508
2509 default:
2510 LOG(FATAL) << "Unexpected type conversion from " << input_type
2511 << " to " << result_type;
2512 }
2513}
2514
2515void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2516 LocationSummary* locations = conversion->GetLocations();
2517 Location out = locations->Out();
2518 Location in = locations->InAt(0);
2519 Primitive::Type result_type = conversion->GetResultType();
2520 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002521 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002522 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002523 case Primitive::kPrimByte:
2524 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002525 case Primitive::kPrimBoolean:
2526 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002527 case Primitive::kPrimShort:
2528 case Primitive::kPrimInt:
2529 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002530 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002531 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002532 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain51d3fc42014-11-13 14:11:42 +00002533 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002534 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002535 Address(CpuRegister(RSP), in.GetStackIndex()));
2536 } else {
2537 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002538 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002539 Immediate(static_cast<int8_t>(in.GetConstant()->AsIntConstant()->GetValue())));
2540 }
2541 break;
2542
2543 default:
2544 LOG(FATAL) << "Unexpected type conversion from " << input_type
2545 << " to " << result_type;
2546 }
2547 break;
2548
Roland Levillain01a8d712014-11-14 16:27:39 +00002549 case Primitive::kPrimShort:
2550 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002551 case Primitive::kPrimBoolean:
2552 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002553 case Primitive::kPrimByte:
2554 case Primitive::kPrimInt:
2555 case Primitive::kPrimChar:
2556 // Processing a Dex `int-to-short' instruction.
2557 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002558 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002559 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002560 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002561 Address(CpuRegister(RSP), in.GetStackIndex()));
2562 } else {
2563 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002564 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002565 Immediate(static_cast<int16_t>(in.GetConstant()->AsIntConstant()->GetValue())));
2566 }
2567 break;
2568
2569 default:
2570 LOG(FATAL) << "Unexpected type conversion from " << input_type
2571 << " to " << result_type;
2572 }
2573 break;
2574
Roland Levillain946e1432014-11-11 17:35:19 +00002575 case Primitive::kPrimInt:
2576 switch (input_type) {
2577 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002578 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002579 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002580 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002581 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002582 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002583 Address(CpuRegister(RSP), in.GetStackIndex()));
2584 } else {
2585 DCHECK(in.IsConstant());
2586 DCHECK(in.GetConstant()->IsLongConstant());
2587 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002588 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002589 }
2590 break;
2591
Roland Levillain3f8f9362014-12-02 17:45:01 +00002592 case Primitive::kPrimFloat: {
2593 // Processing a Dex `float-to-int' instruction.
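        // Rough summary of the intended Java semantics (assuming the float
        // literal below is kPrimIntMax rounded up to 2^31): inputs >= 2^31
        // keep the pre-loaded kPrimIntMax, NaN takes the unordered branch and
        // yields 0, and all remaining inputs go through the truncating
        // cvttss2si -- including large negative values, for which cvttss2si
        // itself returns 0x80000000, i.e. the expected INT32_MIN. The
        // double-to-int case below and the float/double-to-long cases (using
        // kPrimLongMax) follow the same pattern.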
2594 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2595 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002596 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002597
2598 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002599 // if input >= (float)INT_MAX goto done
2600 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002601 __ j(kAboveEqual, &done);
2602 // if input == NaN goto nan
2603 __ j(kUnordered, &nan);
2604 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002605 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002606 __ jmp(&done);
2607 __ Bind(&nan);
2608 // output = 0
2609 __ xorl(output, output);
2610 __ Bind(&done);
2611 break;
2612 }
2613
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002614 case Primitive::kPrimDouble: {
2615 // Processing a Dex `double-to-int' instruction.
2616 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2617 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002618 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002619
2620 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002621 // if input >= (double)INT_MAX goto done
2622 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002623 __ j(kAboveEqual, &done);
2624 // if input == NaN goto nan
2625 __ j(kUnordered, &nan);
2626 // output = double-to-int-truncate(input)
2627 __ cvttsd2si(output, input);
2628 __ jmp(&done);
2629 __ Bind(&nan);
2630 // output = 0
2631 __ xorl(output, output);
2632 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002633 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002634 }
Roland Levillain946e1432014-11-11 17:35:19 +00002635
2636 default:
2637 LOG(FATAL) << "Unexpected type conversion from " << input_type
2638 << " to " << result_type;
2639 }
2640 break;
2641
Roland Levillaindff1f282014-11-05 14:15:05 +00002642 case Primitive::kPrimLong:
2643 DCHECK(out.IsRegister());
2644 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002645 case Primitive::kPrimBoolean:
2646 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002647 case Primitive::kPrimByte:
2648 case Primitive::kPrimShort:
2649 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002650 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002651 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002652 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002653 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002654 break;
2655
Roland Levillain624279f2014-12-04 11:54:28 +00002656 case Primitive::kPrimFloat: {
2657 // Processing a Dex `float-to-long' instruction.
2658 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2659 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002660 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002661
Mark Mendell92e83bf2015-05-07 11:25:03 -04002662 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002663 // if input >= (float)LONG_MAX goto done
2664 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002665 __ j(kAboveEqual, &done);
2666 // if input == NaN goto nan
2667 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002668 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002669 __ cvttss2si(output, input, true);
2670 __ jmp(&done);
2671 __ Bind(&nan);
2672 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002673 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002674 __ Bind(&done);
2675 break;
2676 }
2677
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002678 case Primitive::kPrimDouble: {
2679 // Processing a Dex `double-to-long' instruction.
2680 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2681 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002682 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002683
Mark Mendell92e83bf2015-05-07 11:25:03 -04002684 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002685 // if input >= (double)LONG_MAX goto done
2686 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002687 __ j(kAboveEqual, &done);
2688 // if input == NaN goto nan
2689 __ j(kUnordered, &nan);
2690 // output = double-to-long-truncate(input)
2691 __ cvttsd2si(output, input, true);
2692 __ jmp(&done);
2693 __ Bind(&nan);
2694 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002695 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002696 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002697 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002698 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002699
2700 default:
2701 LOG(FATAL) << "Unexpected type conversion from " << input_type
2702 << " to " << result_type;
2703 }
2704 break;
2705
Roland Levillain981e4542014-11-14 11:47:14 +00002706 case Primitive::kPrimChar:
2707 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002708 case Primitive::kPrimBoolean:
2709 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002710 case Primitive::kPrimByte:
2711 case Primitive::kPrimShort:
2712 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002713 // Processing a Dex `int-to-char' instruction.
2714 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002715 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain981e4542014-11-14 11:47:14 +00002716 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002717 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002718 Address(CpuRegister(RSP), in.GetStackIndex()));
2719 } else {
2720 DCHECK(in.GetConstant()->IsIntConstant());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002721 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002722 Immediate(static_cast<uint16_t>(
2723 in.GetConstant()->AsIntConstant()->GetValue())));
Roland Levillain981e4542014-11-14 11:47:14 +00002724 }
2725 break;
2726
2727 default:
2728 LOG(FATAL) << "Unexpected type conversion from " << input_type
2729 << " to " << result_type;
2730 }
2731 break;
2732
Roland Levillaindff1f282014-11-05 14:15:05 +00002733 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002734 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002735 case Primitive::kPrimBoolean:
2736 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002737 case Primitive::kPrimByte:
2738 case Primitive::kPrimShort:
2739 case Primitive::kPrimInt:
2740 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002741 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002742 if (in.IsRegister()) {
2743 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2744 } else if (in.IsConstant()) {
2745 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2746 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2747 if (v == 0) {
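            // xorps of a register with itself is the usual way to
            // materialize +0.0f: it avoids a constant-area load and is
            // recognized as a zeroing idiom (no dependency on the previous
            // register value) on most processors.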
2748 __ xorps(dest, dest);
2749 } else {
2750 __ movss(dest, codegen_->LiteralFloatAddress(static_cast<float>(v)));
2751 }
2752 } else {
2753 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2754 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2755 }
Roland Levillaincff13742014-11-17 14:32:17 +00002756 break;
2757
2758 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002759 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002760 if (in.IsRegister()) {
2761 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2762 } else if (in.IsConstant()) {
2763 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2764 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2765 if (v == 0) {
2766 __ xorps(dest, dest);
2767 } else {
2768 __ movss(dest, codegen_->LiteralFloatAddress(static_cast<float>(v)));
2769 }
2770 } else {
2771 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2772 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2773 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002774 break;
2775
Roland Levillaincff13742014-11-17 14:32:17 +00002776 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002777 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002778 if (in.IsFpuRegister()) {
2779 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2780 } else if (in.IsConstant()) {
2781 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2782 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2783 if (bit_cast<int64_t, double>(v) == 0) {
2784 __ xorps(dest, dest);
2785 } else {
2786 __ movss(dest, codegen_->LiteralFloatAddress(static_cast<float>(v)));
2787 }
2788 } else {
2789 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2790 Address(CpuRegister(RSP), in.GetStackIndex()));
2791 }
Roland Levillaincff13742014-11-17 14:32:17 +00002792 break;
2793
2794 default:
2795 LOG(FATAL) << "Unexpected type conversion from " << input_type
2796 << " to " << result_type;
2797 }
2798 break;
2799
Roland Levillaindff1f282014-11-05 14:15:05 +00002800 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002801 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002802 case Primitive::kPrimBoolean:
2803 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002804 case Primitive::kPrimByte:
2805 case Primitive::kPrimShort:
2806 case Primitive::kPrimInt:
2807 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002808 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002809 if (in.IsRegister()) {
2810 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2811 } else if (in.IsConstant()) {
2812 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2813 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2814 if (v == 0) {
2815 __ xorpd(dest, dest);
2816 } else {
2817 __ movsd(dest, codegen_->LiteralDoubleAddress(static_cast<double>(v)));
2818 }
2819 } else {
2820 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2821 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2822 }
Roland Levillaincff13742014-11-17 14:32:17 +00002823 break;
2824
2825 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002826 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002827 if (in.IsRegister()) {
2828 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2829 } else if (in.IsConstant()) {
2830 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2831 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2832 if (v == 0) {
2833 __ xorpd(dest, dest);
2834 } else {
2835 __ movsd(dest, codegen_->LiteralDoubleAddress(static_cast<double>(v)));
2836 }
2837 } else {
2838 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2839 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2840 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002841 break;
2842
Roland Levillaincff13742014-11-17 14:32:17 +00002843 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002844 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002845 if (in.IsFpuRegister()) {
2846 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2847 } else if (in.IsConstant()) {
2848 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2849 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
2850 if (bit_cast<int32_t, float>(v) == 0) {
2851 __ xorpd(dest, dest);
2852 } else {
2853 __ movsd(dest, codegen_->LiteralDoubleAddress(static_cast<double>(v)));
2854 }
2855 } else {
2856 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2857 Address(CpuRegister(RSP), in.GetStackIndex()));
2858 }
Roland Levillaincff13742014-11-17 14:32:17 +00002859 break;
2860
2861 default:
2862 LOG(FATAL) << "Unexpected type conversion from " << input_type
2863 << " to " << result_type;
2864 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002865 break;
2866
2867 default:
2868 LOG(FATAL) << "Unexpected type conversion from " << input_type
2869 << " to " << result_type;
2870 }
2871}
2872
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002873void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002874 LocationSummary* locations =
2875 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002876 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002877 case Primitive::kPrimInt: {
2878 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002879 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2880 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002881 break;
2882 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002883
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002884 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002885 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002886 // We can use a leaq or addq if the constant can fit in an immediate.
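      // (Sketch of the trade-off: addq is two-operand, so the output would
      // have to alias the first input, whereas leaq out, [first + imm32]
      // acts as a three-operand add and lets the output be any register;
      // both forms only take a sign-extended 32-bit immediate, hence
      // RegisterOrInt32Constant.)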
Mark Mendellea5af682015-10-22 17:35:49 -04002887 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002888 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002889 break;
2890 }
2891
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002892 case Primitive::kPrimDouble:
2893 case Primitive::kPrimFloat: {
2894 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002895 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002896 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002897 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002898 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002899
2900 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002901 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002902 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002903}
2904
2905void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2906 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002907 Location first = locations->InAt(0);
2908 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002909 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002910
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002911 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002912 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002913 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002914 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2915 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002916 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2917 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002918 } else {
2919 __ leal(out.AsRegister<CpuRegister>(), Address(
2920 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2921 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002922 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002923 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2924 __ addl(out.AsRegister<CpuRegister>(),
2925 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2926 } else {
2927 __ leal(out.AsRegister<CpuRegister>(), Address(
2928 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2929 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002930 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002931 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002932 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002933 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002934 break;
2935 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002936
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002937 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002938 if (second.IsRegister()) {
2939 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2940 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002941 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2942 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002943 } else {
2944 __ leaq(out.AsRegister<CpuRegister>(), Address(
2945 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2946 }
2947 } else {
2948 DCHECK(second.IsConstant());
2949 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2950 int32_t int32_value = Low32Bits(value);
2951 DCHECK_EQ(int32_value, value);
2952 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2953 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2954 } else {
2955 __ leaq(out.AsRegister<CpuRegister>(), Address(
2956 first.AsRegister<CpuRegister>(), int32_value));
2957 }
2958 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002959 break;
2960 }
2961
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002962 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002963 if (second.IsFpuRegister()) {
2964 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2965 } else if (second.IsConstant()) {
2966 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002967 codegen_->LiteralFloatAddress(
2968 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002969 } else {
2970 DCHECK(second.IsStackSlot());
2971 __ addss(first.AsFpuRegister<XmmRegister>(),
2972 Address(CpuRegister(RSP), second.GetStackIndex()));
2973 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002974 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002975 }
2976
2977 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002978 if (second.IsFpuRegister()) {
2979 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2980 } else if (second.IsConstant()) {
2981 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002982 codegen_->LiteralDoubleAddress(
2983 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002984 } else {
2985 DCHECK(second.IsDoubleStackSlot());
2986 __ addsd(first.AsFpuRegister<XmmRegister>(),
2987 Address(CpuRegister(RSP), second.GetStackIndex()));
2988 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002989 break;
2990 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002991
2992 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002993 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002994 }
2995}
2996
2997void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002998 LocationSummary* locations =
2999 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003000 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003001 case Primitive::kPrimInt: {
3002 locations->SetInAt(0, Location::RequiresRegister());
3003 locations->SetInAt(1, Location::Any());
3004 locations->SetOut(Location::SameAsFirstInput());
3005 break;
3006 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003007 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003008 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003009 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003010 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003011 break;
3012 }
Calin Juravle11351682014-10-23 15:38:15 +01003013 case Primitive::kPrimFloat:
3014 case Primitive::kPrimDouble: {
3015 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003016 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003017 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003018 break;
Calin Juravle11351682014-10-23 15:38:15 +01003019 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003020 default:
Calin Juravle11351682014-10-23 15:38:15 +01003021 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003022 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003023}
3024
3025void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3026 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003027 Location first = locations->InAt(0);
3028 Location second = locations->InAt(1);
3029 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003030 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003031 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003032 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003033 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003034 } else if (second.IsConstant()) {
3035 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003036 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003037 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003038 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003039 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003040 break;
3041 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003042 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003043 if (second.IsConstant()) {
3044 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3045 DCHECK(IsInt<32>(value));
3046 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3047 } else {
3048 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3049 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003050 break;
3051 }
3052
Calin Juravle11351682014-10-23 15:38:15 +01003053 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003054 if (second.IsFpuRegister()) {
3055 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3056 } else if (second.IsConstant()) {
3057 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003058 codegen_->LiteralFloatAddress(
3059 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003060 } else {
3061 DCHECK(second.IsStackSlot());
3062 __ subss(first.AsFpuRegister<XmmRegister>(),
3063 Address(CpuRegister(RSP), second.GetStackIndex()));
3064 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003065 break;
Calin Juravle11351682014-10-23 15:38:15 +01003066 }
3067
3068 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003069 if (second.IsFpuRegister()) {
3070 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3071 } else if (second.IsConstant()) {
3072 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003073 codegen_->LiteralDoubleAddress(
3074 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003075 } else {
3076 DCHECK(second.IsDoubleStackSlot());
3077 __ subsd(first.AsFpuRegister<XmmRegister>(),
3078 Address(CpuRegister(RSP), second.GetStackIndex()));
3079 }
Calin Juravle11351682014-10-23 15:38:15 +01003080 break;
3081 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003082
3083 default:
Calin Juravle11351682014-10-23 15:38:15 +01003084 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003085 }
3086}
3087
Calin Juravle34bacdf2014-10-07 20:23:36 +01003088void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3089 LocationSummary* locations =
3090 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3091 switch (mul->GetResultType()) {
3092 case Primitive::kPrimInt: {
3093 locations->SetInAt(0, Location::RequiresRegister());
3094 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003095 if (mul->InputAt(1)->IsIntConstant()) {
3096 // Can use 3 operand multiply.
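        // (i.e. imull out, first, imm32 -- the immediate form of imul has an
        // explicit destination, so the output need not overlap either input;
        // without a constant we fall back to the two-operand form, which
        // requires the output to equal the first input.)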
3097 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3098 } else {
3099 locations->SetOut(Location::SameAsFirstInput());
3100 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003101 break;
3102 }
3103 case Primitive::kPrimLong: {
3104 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003105 locations->SetInAt(1, Location::Any());
3106 if (mul->InputAt(1)->IsLongConstant() &&
3107 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003108 // Can use 3 operand multiply.
3109 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3110 } else {
3111 locations->SetOut(Location::SameAsFirstInput());
3112 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003113 break;
3114 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003115 case Primitive::kPrimFloat:
3116 case Primitive::kPrimDouble: {
3117 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003118 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003119 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003120 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003121 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003122
3123 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003124 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003125 }
3126}
3127
3128void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3129 LocationSummary* locations = mul->GetLocations();
3130 Location first = locations->InAt(0);
3131 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003132 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003133 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003134 case Primitive::kPrimInt:
3135 // The constant may have ended up in a register, so test explicitly to avoid
3136 // problems where the output may not be the same as the first operand.
3137 if (mul->InputAt(1)->IsIntConstant()) {
3138 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3139 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3140 } else if (second.IsRegister()) {
3141 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003142 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003143 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003144 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003145 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003146 __ imull(first.AsRegister<CpuRegister>(),
3147 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003148 }
3149 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003150 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003151 // The constant may have ended up in a register, so test explicitly to avoid
3152 // problems where the output may not be the same as the first operand.
3153 if (mul->InputAt(1)->IsLongConstant()) {
3154 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3155 if (IsInt<32>(value)) {
3156 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3157 Immediate(static_cast<int32_t>(value)));
3158 } else {
3159 // Have to use the constant area.
3160 DCHECK(first.Equals(out));
3161 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3162 }
3163 } else if (second.IsRegister()) {
3164 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003165 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003166 } else {
3167 DCHECK(second.IsDoubleStackSlot());
3168 DCHECK(first.Equals(out));
3169 __ imulq(first.AsRegister<CpuRegister>(),
3170 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003171 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003172 break;
3173 }
3174
Calin Juravleb5bfa962014-10-21 18:02:24 +01003175 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003176 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003177 if (second.IsFpuRegister()) {
3178 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3179 } else if (second.IsConstant()) {
3180 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003181 codegen_->LiteralFloatAddress(
3182 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003183 } else {
3184 DCHECK(second.IsStackSlot());
3185 __ mulss(first.AsFpuRegister<XmmRegister>(),
3186 Address(CpuRegister(RSP), second.GetStackIndex()));
3187 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003188 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003189 }
3190
3191 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003192 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003193 if (second.IsFpuRegister()) {
3194 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3195 } else if (second.IsConstant()) {
3196 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003197 codegen_->LiteralDoubleAddress(
3198 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003199 } else {
3200 DCHECK(second.IsDoubleStackSlot());
3201 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3202 Address(CpuRegister(RSP), second.GetStackIndex()));
3203 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003204 break;
3205 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003206
3207 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003208 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003209 }
3210}
3211
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003212void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3213 uint32_t stack_adjustment, bool is_float) {
3214 if (source.IsStackSlot()) {
3215 DCHECK(is_float);
3216 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3217 } else if (source.IsDoubleStackSlot()) {
3218 DCHECK(!is_float);
3219 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3220 } else {
3221    // Write the value to the temporary stack location and load it onto the FP stack.
3222 if (is_float) {
3223 Location stack_temp = Location::StackSlot(temp_offset);
3224 codegen_->Move(stack_temp, source);
3225 __ flds(Address(CpuRegister(RSP), temp_offset));
3226 } else {
3227 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3228 codegen_->Move(stack_temp, source);
3229 __ fldl(Address(CpuRegister(RSP), temp_offset));
3230 }
3231 }
3232}
3233
3234void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3235 Primitive::Type type = rem->GetResultType();
3236 bool is_float = type == Primitive::kPrimFloat;
3237 size_t elem_size = Primitive::ComponentSize(type);
3238 LocationSummary* locations = rem->GetLocations();
3239 Location first = locations->InAt(0);
3240 Location second = locations->InAt(1);
3241 Location out = locations->Out();
3242
3243 // Create stack space for 2 elements.
3244 // TODO: enhance register allocator to ask for stack temporaries.
3245 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3246
3247 // Load the values to the FP stack in reverse order, using temporaries if needed.
3248 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3249 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3250
3251 // Loop doing FPREM until we stabilize.
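  // x87 FPREM performs only a partial reduction per iteration (it lowers the exponent
  // difference by at most 63), so the instruction must be re-issued until the C2 status
  // flag reports that the reduction is complete.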
Mark Mendell0c9497d2015-08-21 09:30:05 -04003252 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003253 __ Bind(&retry);
3254 __ fprem();
3255
3256 // Move FP status to AX.
3257 __ fstsw();
3258
3259 // And see if the argument reduction is complete. This is signaled by the
3260 // C2 FPU flag bit set to 0.
3261 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3262 __ j(kNotEqual, &retry);
3263
3264 // We have settled on the final value. Retrieve it into an XMM register.
3265 // Store FP top of stack to real stack.
3266 if (is_float) {
3267 __ fsts(Address(CpuRegister(RSP), 0));
3268 } else {
3269 __ fstl(Address(CpuRegister(RSP), 0));
3270 }
3271
3272 // Pop the 2 items from the FP stack.
3273 __ fucompp();
3274
3275 // Load the value from the stack into an XMM register.
3276 DCHECK(out.IsFpuRegister()) << out;
3277 if (is_float) {
3278 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3279 } else {
3280 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3281 }
3282
3283 // And remove the temporary stack space we allocated.
3284 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3285}
3286
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003287void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3288 DCHECK(instruction->IsDiv() || instruction->IsRem());
3289
3290 LocationSummary* locations = instruction->GetLocations();
3291 Location second = locations->InAt(1);
3292 DCHECK(second.IsConstant());
3293
3294 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3295 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003296 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003297
3298 DCHECK(imm == 1 || imm == -1);
3299
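  // No divide instruction is needed here: x / 1 == x, x / -1 == -x, and x % (+/-1) == 0,
  // so the quotient is just a (possibly negated) move and the remainder is simply zeroed.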
3300 switch (instruction->GetResultType()) {
3301 case Primitive::kPrimInt: {
3302 if (instruction->IsRem()) {
3303 __ xorl(output_register, output_register);
3304 } else {
3305 __ movl(output_register, input_register);
3306 if (imm == -1) {
3307 __ negl(output_register);
3308 }
3309 }
3310 break;
3311 }
3312
3313 case Primitive::kPrimLong: {
3314 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003315 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003316 } else {
3317 __ movq(output_register, input_register);
3318 if (imm == -1) {
3319 __ negq(output_register);
3320 }
3321 }
3322 break;
3323 }
3324
3325 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003326 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003327 }
3328}
3329
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003330void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003331 LocationSummary* locations = instruction->GetLocations();
3332 Location second = locations->InAt(1);
3333
3334 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3335 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3336
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003337 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003338
3339 DCHECK(IsPowerOfTwo(std::abs(imm)));
3340
3341 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3342
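  // Signed division by 2^k is an arithmetic shift, but a negative numerator must first be
  // biased by (2^k - 1) so the result truncates toward zero. Illustrative example:
  // -7 / 4 is computed as (-7 + 3) >> 2 == -1, whereas a plain -7 >> 2 would give -2.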
3343 if (instruction->GetResultType() == Primitive::kPrimInt) {
3344 __ leal(tmp, Address(numerator, std::abs(imm) - 1));
3345 __ testl(numerator, numerator);
3346 __ cmov(kGreaterEqual, tmp, numerator);
3347 int shift = CTZ(imm);
3348 __ sarl(tmp, Immediate(shift));
3349
3350 if (imm < 0) {
3351 __ negl(tmp);
3352 }
3353
3354 __ movl(output_register, tmp);
3355 } else {
3356 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3357 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3358
Mark Mendell92e83bf2015-05-07 11:25:03 -04003359 codegen_->Load64BitValue(rdx, std::abs(imm) - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003360 __ addq(rdx, numerator);
3361 __ testq(numerator, numerator);
3362 __ cmov(kGreaterEqual, rdx, numerator);
3363 int shift = CTZ(imm);
3364 __ sarq(rdx, Immediate(shift));
3365
3366 if (imm < 0) {
3367 __ negq(rdx);
3368 }
3369
3370 __ movq(output_register, rdx);
3371 }
3372}
3373
3374void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3375 DCHECK(instruction->IsDiv() || instruction->IsRem());
3376
3377 LocationSummary* locations = instruction->GetLocations();
3378 Location second = locations->InAt(1);
3379
3380 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3381 : locations->GetTemp(0).AsRegister<CpuRegister>();
3382 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3383 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3384 : locations->Out().AsRegister<CpuRegister>();
3385 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3386
3387 DCHECK_EQ(RAX, eax.AsRegister());
3388 DCHECK_EQ(RDX, edx.AsRegister());
3389 if (instruction->IsDiv()) {
3390 DCHECK_EQ(RAX, out.AsRegister());
3391 } else {
3392 DCHECK_EQ(RDX, out.AsRegister());
3393 }
3394
3395 int64_t magic;
3396 int shift;
3397
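  // The division is strength-reduced to a multiply by a precomputed "magic" reciprocal,
  // followed by shifts and a sign correction (Hacker's Delight style). Illustrative values
  // only (the real pair comes from CalculateMagicAndShiftForDivRem): a 32-bit divide by 7
  // classically uses magic = 0x92492493 and shift = 2, giving
  // quotient = ((high32(numerator * magic) + numerator) >> 2) + sign_bit.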
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003398 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003399 if (instruction->GetResultType() == Primitive::kPrimInt) {
3400 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3401
3402 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3403
3404 __ movl(numerator, eax);
3405
Mark Mendell0c9497d2015-08-21 09:30:05 -04003406 NearLabel no_div;
3407 NearLabel end;
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003408 __ testl(eax, eax);
3409 __ j(kNotEqual, &no_div);
3410
3411 __ xorl(out, out);
3412 __ jmp(&end);
3413
3414 __ Bind(&no_div);
3415
3416 __ movl(eax, Immediate(magic));
3417 __ imull(numerator);
3418
3419 if (imm > 0 && magic < 0) {
3420 __ addl(edx, numerator);
3421 } else if (imm < 0 && magic > 0) {
3422 __ subl(edx, numerator);
3423 }
3424
3425 if (shift != 0) {
3426 __ sarl(edx, Immediate(shift));
3427 }
3428
3429 __ movl(eax, edx);
3430 __ shrl(edx, Immediate(31));
3431 __ addl(edx, eax);
3432
3433 if (instruction->IsRem()) {
3434 __ movl(eax, numerator);
3435 __ imull(edx, Immediate(imm));
3436 __ subl(eax, edx);
3437 __ movl(edx, eax);
3438 } else {
3439 __ movl(eax, edx);
3440 }
3441 __ Bind(&end);
3442 } else {
3443 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3444
3445 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3446
3447 CpuRegister rax = eax;
3448 CpuRegister rdx = edx;
3449
3450 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3451
3452 // Save the numerator.
3453 __ movq(numerator, rax);
3454
3455 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003456 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003457
3458 // RDX:RAX = magic * numerator
3459 __ imulq(numerator);
3460
3461 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003462 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003463 __ addq(rdx, numerator);
3464 } else if (imm < 0 && magic > 0) {
3465 // RDX -= numerator
3466 __ subq(rdx, numerator);
3467 }
3468
3469 // Shift if needed.
3470 if (shift != 0) {
3471 __ sarq(rdx, Immediate(shift));
3472 }
3473
3474 // RDX += 1 if RDX < 0
3475 __ movq(rax, rdx);
3476 __ shrq(rdx, Immediate(63));
3477 __ addq(rdx, rax);
3478
3479 if (instruction->IsRem()) {
3480 __ movq(rax, numerator);
3481
3482 if (IsInt<32>(imm)) {
3483 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3484 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003485 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003486 }
3487
3488 __ subq(rax, rdx);
3489 __ movq(rdx, rax);
3490 } else {
3491 __ movq(rax, rdx);
3492 }
3493 }
3494}
3495
Calin Juravlebacfec32014-11-14 15:54:36 +00003496void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3497 DCHECK(instruction->IsDiv() || instruction->IsRem());
3498 Primitive::Type type = instruction->GetResultType();
3499  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3500
3501 bool is_div = instruction->IsDiv();
3502 LocationSummary* locations = instruction->GetLocations();
3503
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003504 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3505 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003506
Roland Levillain271ab9c2014-11-27 15:23:57 +00003507 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003508 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003509
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003510 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003511 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003512
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003513 if (imm == 0) {
3514      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3515 } else if (imm == 1 || imm == -1) {
3516 DivRemOneOrMinusOne(instruction);
3517 } else if (instruction->IsDiv() && IsPowerOfTwo(std::abs(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003518 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003519 } else {
3520 DCHECK(imm <= -2 || imm >= 2);
3521 GenerateDivRemWithAnyConstant(instruction);
3522 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003523 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003524 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003525 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
3526 out.AsRegister(), type, is_div);
3527 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003528
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003529 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3530 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3531    // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000)
3532 // so it's safe to just use negl instead of more complex comparisons.
3533 if (type == Primitive::kPrimInt) {
3534 __ cmpl(second_reg, Immediate(-1));
3535 __ j(kEqual, slow_path->GetEntryLabel());
3536      // edx:eax <- sign extension of eax
3537 __ cdq();
3538 // eax = quotient, edx = remainder
3539 __ idivl(second_reg);
3540 } else {
3541 __ cmpq(second_reg, Immediate(-1));
3542 __ j(kEqual, slow_path->GetEntryLabel());
3543      // rdx:rax <- sign extension of rax
3544 __ cqo();
3545 // rax = quotient, rdx = remainder
3546 __ idivq(second_reg);
3547 }
3548 __ Bind(slow_path->GetExitLabel());
3549 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003550}
3551
Calin Juravle7c4954d2014-10-28 16:57:40 +00003552void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3553 LocationSummary* locations =
3554 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3555 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003556 case Primitive::kPrimInt:
3557 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003558 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003559 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003560 locations->SetOut(Location::SameAsFirstInput());
3561 // Intel uses edx:eax as the dividend.
3562 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003563 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3564 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3565 // output and request another temp.
3566 if (div->InputAt(1)->IsConstant()) {
3567 locations->AddTemp(Location::RequiresRegister());
3568 }
Calin Juravled0d48522014-11-04 16:40:20 +00003569 break;
3570 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003571
Calin Juravle7c4954d2014-10-28 16:57:40 +00003572 case Primitive::kPrimFloat:
3573 case Primitive::kPrimDouble: {
3574 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003575 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003576 locations->SetOut(Location::SameAsFirstInput());
3577 break;
3578 }
3579
3580 default:
3581 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3582 }
3583}
3584
3585void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3586 LocationSummary* locations = div->GetLocations();
3587 Location first = locations->InAt(0);
3588 Location second = locations->InAt(1);
3589 DCHECK(first.Equals(locations->Out()));
3590
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003591 Primitive::Type type = div->GetResultType();
3592 switch (type) {
3593 case Primitive::kPrimInt:
3594 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003595 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003596 break;
3597 }
3598
Calin Juravle7c4954d2014-10-28 16:57:40 +00003599 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003600 if (second.IsFpuRegister()) {
3601 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3602 } else if (second.IsConstant()) {
3603 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003604 codegen_->LiteralFloatAddress(
3605 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003606 } else {
3607 DCHECK(second.IsStackSlot());
3608 __ divss(first.AsFpuRegister<XmmRegister>(),
3609 Address(CpuRegister(RSP), second.GetStackIndex()));
3610 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003611 break;
3612 }
3613
3614 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003615 if (second.IsFpuRegister()) {
3616 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3617 } else if (second.IsConstant()) {
3618 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003619 codegen_->LiteralDoubleAddress(
3620 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003621 } else {
3622 DCHECK(second.IsDoubleStackSlot());
3623 __ divsd(first.AsFpuRegister<XmmRegister>(),
3624 Address(CpuRegister(RSP), second.GetStackIndex()));
3625 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003626 break;
3627 }
3628
3629 default:
3630 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3631 }
3632}
3633
Calin Juravlebacfec32014-11-14 15:54:36 +00003634void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003635 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003636 LocationSummary* locations =
3637 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003638
3639 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003640 case Primitive::kPrimInt:
3641 case Primitive::kPrimLong: {
3642 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003643 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003644 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3645 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003646 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3647 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3648 // output and request another temp.
3649 if (rem->InputAt(1)->IsConstant()) {
3650 locations->AddTemp(Location::RequiresRegister());
3651 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003652 break;
3653 }
3654
3655 case Primitive::kPrimFloat:
3656 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003657 locations->SetInAt(0, Location::Any());
3658 locations->SetInAt(1, Location::Any());
3659 locations->SetOut(Location::RequiresFpuRegister());
3660 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003661 break;
3662 }
3663
3664 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003665 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003666 }
3667}
3668
3669void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3670 Primitive::Type type = rem->GetResultType();
3671 switch (type) {
3672 case Primitive::kPrimInt:
3673 case Primitive::kPrimLong: {
3674 GenerateDivRemIntegral(rem);
3675 break;
3676 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003677 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003678 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003679 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003680 break;
3681 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003682 default:
3683 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3684 }
3685}
3686
Calin Juravled0d48522014-11-04 16:40:20 +00003687void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003688 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3689 ? LocationSummary::kCallOnSlowPath
3690 : LocationSummary::kNoCall;
3691 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003692 locations->SetInAt(0, Location::Any());
3693 if (instruction->HasUses()) {
3694 locations->SetOut(Location::SameAsFirstInput());
3695 }
3696}
3697
3698void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003699 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003700 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3701 codegen_->AddSlowPath(slow_path);
3702
3703 LocationSummary* locations = instruction->GetLocations();
3704 Location value = locations->InAt(0);
3705
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003706 switch (instruction->GetType()) {
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003707 case Primitive::kPrimByte:
3708 case Primitive::kPrimChar:
3709 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003710 case Primitive::kPrimInt: {
3711 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003712 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003713 __ j(kEqual, slow_path->GetEntryLabel());
3714 } else if (value.IsStackSlot()) {
3715 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3716 __ j(kEqual, slow_path->GetEntryLabel());
3717 } else {
3718 DCHECK(value.IsConstant()) << value;
3719 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3720 __ jmp(slow_path->GetEntryLabel());
3721 }
3722 }
3723 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003724 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003725 case Primitive::kPrimLong: {
3726 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003727 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003728 __ j(kEqual, slow_path->GetEntryLabel());
3729 } else if (value.IsDoubleStackSlot()) {
3730 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3731 __ j(kEqual, slow_path->GetEntryLabel());
3732 } else {
3733 DCHECK(value.IsConstant()) << value;
3734 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3735 __ jmp(slow_path->GetEntryLabel());
3736 }
3737 }
3738 break;
3739 }
3740 default:
3741 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003742 }
Calin Juravled0d48522014-11-04 16:40:20 +00003743}
3744
Calin Juravle9aec02f2014-11-18 23:06:35 +00003745void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3746 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3747
3748 LocationSummary* locations =
3749 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3750
3751 switch (op->GetResultType()) {
3752 case Primitive::kPrimInt:
3753 case Primitive::kPrimLong: {
3754 locations->SetInAt(0, Location::RequiresRegister());
3755 // The shift count needs to be in CL.
3756 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3757 locations->SetOut(Location::SameAsFirstInput());
3758 break;
3759 }
3760 default:
3761 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3762 }
3763}
3764
3765void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3766 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3767
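  // Shift counts follow Java semantics: only the low 5 bits (int) or 6 bits (long) of the
  // count are used. Variable counts in CL are already masked by the hardware; constant
  // counts are masked explicitly below with kMaxIntShiftValue / kMaxLongShiftValue.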
3768 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003769 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003770 Location second = locations->InAt(1);
3771
3772 switch (op->GetResultType()) {
3773 case Primitive::kPrimInt: {
3774 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003775 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003776 if (op->IsShl()) {
3777 __ shll(first_reg, second_reg);
3778 } else if (op->IsShr()) {
3779 __ sarl(first_reg, second_reg);
3780 } else {
3781 __ shrl(first_reg, second_reg);
3782 }
3783 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003784 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003785 if (op->IsShl()) {
3786 __ shll(first_reg, imm);
3787 } else if (op->IsShr()) {
3788 __ sarl(first_reg, imm);
3789 } else {
3790 __ shrl(first_reg, imm);
3791 }
3792 }
3793 break;
3794 }
3795 case Primitive::kPrimLong: {
3796 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003797 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003798 if (op->IsShl()) {
3799 __ shlq(first_reg, second_reg);
3800 } else if (op->IsShr()) {
3801 __ sarq(first_reg, second_reg);
3802 } else {
3803 __ shrq(first_reg, second_reg);
3804 }
3805 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003806 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003807 if (op->IsShl()) {
3808 __ shlq(first_reg, imm);
3809 } else if (op->IsShr()) {
3810 __ sarq(first_reg, imm);
3811 } else {
3812 __ shrq(first_reg, imm);
3813 }
3814 }
3815 break;
3816 }
3817 default:
3818 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003819 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003820 }
3821}
3822
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003823void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3824 LocationSummary* locations =
3825 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3826
3827 switch (ror->GetResultType()) {
3828 case Primitive::kPrimInt:
3829 case Primitive::kPrimLong: {
3830 locations->SetInAt(0, Location::RequiresRegister());
3831 // The shift count needs to be in CL (unless it is a constant).
3832 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3833 locations->SetOut(Location::SameAsFirstInput());
3834 break;
3835 }
3836 default:
3837 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3838 UNREACHABLE();
3839 }
3840}
3841
3842void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3843 LocationSummary* locations = ror->GetLocations();
3844 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3845 Location second = locations->InAt(1);
3846
3847 switch (ror->GetResultType()) {
3848 case Primitive::kPrimInt:
3849 if (second.IsRegister()) {
3850 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3851 __ rorl(first_reg, second_reg);
3852 } else {
3853 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
3854 __ rorl(first_reg, imm);
3855 }
3856 break;
3857 case Primitive::kPrimLong:
3858 if (second.IsRegister()) {
3859 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3860 __ rorq(first_reg, second_reg);
3861 } else {
3862 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
3863 __ rorq(first_reg, imm);
3864 }
3865 break;
3866 default:
3867 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3868 UNREACHABLE();
3869 }
3870}
3871
Calin Juravle9aec02f2014-11-18 23:06:35 +00003872void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3873 HandleShift(shl);
3874}
3875
3876void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3877 HandleShift(shl);
3878}
3879
3880void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3881 HandleShift(shr);
3882}
3883
3884void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3885 HandleShift(shr);
3886}
3887
3888void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3889 HandleShift(ushr);
3890}
3891
3892void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3893 HandleShift(ushr);
3894}
3895
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003896void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003897 LocationSummary* locations =
3898 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003899 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffray729645a2015-11-19 13:29:02 +00003900 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3901 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003902 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003903}
3904
3905void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003906  // Note: if heap poisoning is enabled, the entry point takes care
3907 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003908 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3909 instruction,
3910 instruction->GetDexPc(),
3911 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003912 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003913
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01003914 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003915}
3916
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003917void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3918 LocationSummary* locations =
3919 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3920 InvokeRuntimeCallingConvention calling_convention;
3921 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003922 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003923 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003924 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003925}
3926
3927void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3928 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003929 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3930 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003931  // Note: if heap poisoning is enabled, the entry point takes care
3932 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003933 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3934 instruction,
3935 instruction->GetDexPc(),
3936 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003937 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003938
3939 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003940}
3941
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003942void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003943 LocationSummary* locations =
3944 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003945 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3946 if (location.IsStackSlot()) {
3947 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3948 } else if (location.IsDoubleStackSlot()) {
3949 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3950 }
3951 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003952}
3953
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003954void InstructionCodeGeneratorX86_64::VisitParameterValue(
3955 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003956 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003957}
3958
3959void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3960 LocationSummary* locations =
3961 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3962 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3963}
3964
3965void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3966 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3967 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003968}
3969
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003970void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003971 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003972 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003973 locations->SetInAt(0, Location::RequiresRegister());
3974 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003975}
3976
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003977void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3978 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003979 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3980 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003981 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003982 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003983 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003984 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003985 break;
3986
3987 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003988 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003989 break;
3990
3991 default:
3992 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
3993 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003994}
3995
David Brazdil66d126e2015-04-03 16:02:44 +01003996void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
3997 LocationSummary* locations =
3998 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
3999 locations->SetInAt(0, Location::RequiresRegister());
4000 locations->SetOut(Location::SameAsFirstInput());
4001}
4002
4003void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004004 LocationSummary* locations = bool_not->GetLocations();
4005 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4006 locations->Out().AsRegister<CpuRegister>().AsRegister());
4007 Location out = locations->Out();
4008 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4009}
4010
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004011void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004012 LocationSummary* locations =
4013 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004014 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4015 locations->SetInAt(i, Location::Any());
4016 }
4017 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004018}
4019
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004020void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004021 LOG(FATAL) << "Unimplemented";
4022}
4023
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004024void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004025 /*
4026  * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004027 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004028 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4029 */
4030 switch (kind) {
4031 case MemBarrierKind::kAnyAny: {
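      // Only the StoreLoad (AnyAny) case needs a real fence on x86-64. MemoryFence() is
      // assumed to emit one (an mfence or an equivalent locked instruction); the exact
      // encoding is an assembler implementation detail.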
Mark P Mendell17077d82015-12-16 19:15:59 +00004032 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004033 break;
4034 }
4035 case MemBarrierKind::kAnyStore:
4036 case MemBarrierKind::kLoadAny:
4037 case MemBarrierKind::kStoreStore: {
4038 // nop
4039 break;
4040 }
4041 default:
4042      LOG(FATAL) << "Unexpected memory barrier " << kind;
4043 }
4044}
4045
4046void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4047 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4048
Roland Levillain0d5a2812015-11-13 10:07:31 +00004049 bool object_field_get_with_read_barrier =
4050 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004051 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004052 new (GetGraph()->GetArena()) LocationSummary(instruction,
4053 object_field_get_with_read_barrier ?
4054 LocationSummary::kCallOnSlowPath :
4055 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004056 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004057 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4058 locations->SetOut(Location::RequiresFpuRegister());
4059 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004060 // The output overlaps for an object field get when read barriers
4061 // are enabled: we do not want the move to overwrite the object's
4062 // location, as we need it to emit the read barrier.
4063 locations->SetOut(
4064 Location::RequiresRegister(),
4065 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004066 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004067 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4068 // We need a temporary register for the read barrier marking slow
4069 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4070 locations->AddTemp(Location::RequiresRegister());
4071 }
Calin Juravle52c48962014-12-16 17:02:57 +00004072}
4073
4074void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4075 const FieldInfo& field_info) {
4076 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4077
4078 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004079 Location base_loc = locations->InAt(0);
4080 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004081 Location out = locations->Out();
4082 bool is_volatile = field_info.IsVolatile();
4083 Primitive::Type field_type = field_info.GetFieldType();
4084 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4085
4086 switch (field_type) {
4087 case Primitive::kPrimBoolean: {
4088 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4089 break;
4090 }
4091
4092 case Primitive::kPrimByte: {
4093 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4094 break;
4095 }
4096
4097 case Primitive::kPrimShort: {
4098 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4099 break;
4100 }
4101
4102 case Primitive::kPrimChar: {
4103 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4104 break;
4105 }
4106
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004107 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004108 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4109 break;
4110 }
4111
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004112 case Primitive::kPrimNot: {
4113 // /* HeapReference<Object> */ out = *(base + offset)
4114 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4115 Location temp_loc = locations->GetTemp(0);
4116 // Note that a potential implicit null check is handled in this
4117        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4118 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4119 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4120 if (is_volatile) {
4121 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4122 }
4123 } else {
4124 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4125 codegen_->MaybeRecordImplicitNullCheck(instruction);
4126 if (is_volatile) {
4127 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4128 }
4129 // If read barriers are enabled, emit read barriers other than
4130 // Baker's using a slow path (and also unpoison the loaded
4131 // reference, if heap poisoning is enabled).
4132 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4133 }
4134 break;
4135 }
4136
Calin Juravle52c48962014-12-16 17:02:57 +00004137 case Primitive::kPrimLong: {
4138 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4139 break;
4140 }
4141
4142 case Primitive::kPrimFloat: {
4143 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4144 break;
4145 }
4146
4147 case Primitive::kPrimDouble: {
4148 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4149 break;
4150 }
4151
4152 case Primitive::kPrimVoid:
4153 LOG(FATAL) << "Unreachable type " << field_type;
4154 UNREACHABLE();
4155 }
4156
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004157 if (field_type == Primitive::kPrimNot) {
4158 // Potential implicit null checks, in the case of reference
4159 // fields, are handled in the previous switch statement.
4160 } else {
4161 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004162 }
Roland Levillain4d027112015-07-01 15:41:14 +01004163
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004164 if (is_volatile) {
4165 if (field_type == Primitive::kPrimNot) {
4166 // Memory barriers, in the case of references, are also handled
4167 // in the previous switch statement.
4168 } else {
4169 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4170 }
Roland Levillain4d027112015-07-01 15:41:14 +01004171 }
Calin Juravle52c48962014-12-16 17:02:57 +00004172}
4173
4174void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4175 const FieldInfo& field_info) {
4176 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4177
4178 LocationSummary* locations =
4179 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004180 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004181 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004182 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004183 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004184
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004185 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004186 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004187 if (is_volatile) {
4188 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4189 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4190 } else {
4191 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4192 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004193 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004194 if (is_volatile) {
4195 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4196 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4197 } else {
4198 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4199 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004200 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004201 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004202 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004203 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004204 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004205 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4206 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004207 locations->AddTemp(Location::RequiresRegister());
4208 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004209}
4210
Calin Juravle52c48962014-12-16 17:02:57 +00004211void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004212 const FieldInfo& field_info,
4213 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004214 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4215
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004216 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004217 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4218 Location value = locations->InAt(1);
4219 bool is_volatile = field_info.IsVolatile();
4220 Primitive::Type field_type = field_info.GetFieldType();
4221 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4222
4223 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004224 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004225 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004226
Mark Mendellea5af682015-10-22 17:35:49 -04004227 bool maybe_record_implicit_null_check_done = false;
4228
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004229 switch (field_type) {
4230 case Primitive::kPrimBoolean:
4231 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004232 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004233 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004234 __ movb(Address(base, offset), Immediate(v));
4235 } else {
4236 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4237 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004238 break;
4239 }
4240
4241 case Primitive::kPrimShort:
4242 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004243 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004244 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004245 __ movw(Address(base, offset), Immediate(v));
4246 } else {
4247 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4248 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004249 break;
4250 }
4251
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004252 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004253 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004254 if (value.IsConstant()) {
4255 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004256 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4257 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4258 // Note: if heap poisoning is enabled, no need to poison
4259 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004260 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004261 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004262 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4263 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4264 __ movl(temp, value.AsRegister<CpuRegister>());
4265 __ PoisonHeapReference(temp);
4266 __ movl(Address(base, offset), temp);
4267 } else {
4268 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4269 }
Mark Mendell40741f32015-04-20 22:10:34 -04004270 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004271 break;
4272 }
4273
4274 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004275 if (value.IsConstant()) {
4276 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004277 codegen_->MoveInt64ToAddress(Address(base, offset),
4278 Address(base, offset + sizeof(int32_t)),
4279 v,
4280 instruction);
4281 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004282 } else {
4283 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4284 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004285 break;
4286 }
4287
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004288 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004289 if (value.IsConstant()) {
4290 int32_t v =
4291 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4292 __ movl(Address(base, offset), Immediate(v));
4293 } else {
4294 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4295 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004296 break;
4297 }
4298
4299 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004300 if (value.IsConstant()) {
4301 int64_t v =
4302 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4303 codegen_->MoveInt64ToAddress(Address(base, offset),
4304 Address(base, offset + sizeof(int32_t)),
4305 v,
4306 instruction);
4307 maybe_record_implicit_null_check_done = true;
4308 } else {
4309 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4310 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004311 break;
4312 }
4313
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004314 case Primitive::kPrimVoid:
4315 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004316 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004317 }
Calin Juravle52c48962014-12-16 17:02:57 +00004318
Mark Mendellea5af682015-10-22 17:35:49 -04004319 if (!maybe_record_implicit_null_check_done) {
4320 codegen_->MaybeRecordImplicitNullCheck(instruction);
4321 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004322
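  // Reference stores need a card-table write barrier so the garbage collector notices the
  // updated field: MarkGCCard dirties the card covering `base`, and `value_can_be_null`
  // controls whether a runtime null test is emitted to skip the mark for null stores.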
4323 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4324 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4325 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004326 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004327 }
4328
Calin Juravle52c48962014-12-16 17:02:57 +00004329 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004330 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004331 }
4332}
4333
4334void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4335 HandleFieldSet(instruction, instruction->GetFieldInfo());
4336}
4337
4338void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004339 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004340}
4341
4342void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004343 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004344}
4345
4346void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004347 HandleFieldGet(instruction, instruction->GetFieldInfo());
4348}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004349
Calin Juravle52c48962014-12-16 17:02:57 +00004350void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4351 HandleFieldGet(instruction);
4352}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004353
Calin Juravle52c48962014-12-16 17:02:57 +00004354void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4355 HandleFieldGet(instruction, instruction->GetFieldInfo());
4356}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004357
Calin Juravle52c48962014-12-16 17:02:57 +00004358void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4359 HandleFieldSet(instruction, instruction->GetFieldInfo());
4360}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004361
Calin Juravle52c48962014-12-16 17:02:57 +00004362void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004363 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004364}
4365
Calin Juravlee460d1d2015-09-29 04:52:17 +01004366void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4367 HUnresolvedInstanceFieldGet* instruction) {
4368 FieldAccessCallingConventionX86_64 calling_convention;
4369 codegen_->CreateUnresolvedFieldLocationSummary(
4370 instruction, instruction->GetFieldType(), calling_convention);
4371}
4372
4373void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4374 HUnresolvedInstanceFieldGet* instruction) {
4375 FieldAccessCallingConventionX86_64 calling_convention;
4376 codegen_->GenerateUnresolvedFieldAccess(instruction,
4377 instruction->GetFieldType(),
4378 instruction->GetFieldIndex(),
4379 instruction->GetDexPc(),
4380 calling_convention);
4381}
4382
4383void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4384 HUnresolvedInstanceFieldSet* instruction) {
4385 FieldAccessCallingConventionX86_64 calling_convention;
4386 codegen_->CreateUnresolvedFieldLocationSummary(
4387 instruction, instruction->GetFieldType(), calling_convention);
4388}
4389
4390void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4391 HUnresolvedInstanceFieldSet* instruction) {
4392 FieldAccessCallingConventionX86_64 calling_convention;
4393 codegen_->GenerateUnresolvedFieldAccess(instruction,
4394 instruction->GetFieldType(),
4395 instruction->GetFieldIndex(),
4396 instruction->GetDexPc(),
4397 calling_convention);
4398}
4399
4400void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4401 HUnresolvedStaticFieldGet* instruction) {
4402 FieldAccessCallingConventionX86_64 calling_convention;
4403 codegen_->CreateUnresolvedFieldLocationSummary(
4404 instruction, instruction->GetFieldType(), calling_convention);
4405}
4406
4407void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4408 HUnresolvedStaticFieldGet* instruction) {
4409 FieldAccessCallingConventionX86_64 calling_convention;
4410 codegen_->GenerateUnresolvedFieldAccess(instruction,
4411 instruction->GetFieldType(),
4412 instruction->GetFieldIndex(),
4413 instruction->GetDexPc(),
4414 calling_convention);
4415}
4416
4417void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4418 HUnresolvedStaticFieldSet* instruction) {
4419 FieldAccessCallingConventionX86_64 calling_convention;
4420 codegen_->CreateUnresolvedFieldLocationSummary(
4421 instruction, instruction->GetFieldType(), calling_convention);
4422}
4423
4424void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4425 HUnresolvedStaticFieldSet* instruction) {
4426 FieldAccessCallingConventionX86_64 calling_convention;
4427 codegen_->GenerateUnresolvedFieldAccess(instruction,
4428 instruction->GetFieldType(),
4429 instruction->GetFieldIndex(),
4430 instruction->GetDexPc(),
4431 calling_convention);
4432}
4433
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004434void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004435 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4436 ? LocationSummary::kCallOnSlowPath
4437 : LocationSummary::kNoCall;
4438 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4439 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004440 ? Location::RequiresRegister()
4441 : Location::Any();
4442 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004443 if (instruction->HasUses()) {
4444 locations->SetOut(Location::SameAsFirstInput());
4445 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004446}
4447
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004448void InstructionCodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004449 if (codegen_->CanMoveNullCheckToUser(instruction)) {
4450 return;
4451 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004452 LocationSummary* locations = instruction->GetLocations();
4453 Location obj = locations->InAt(0);
4454
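  // Note: testl with a memory operand only reads [obj + 0] and sets the flags, so RAX is not
  // actually modified here.  If obj is null the memory read faults, and the PC recorded below
  // is presumably what lets the runtime map that fault back to this instruction and throw the
  // NullPointerException instead of crashing.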
4455 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
4456 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4457}
4458
4459void InstructionCodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004460 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004461 codegen_->AddSlowPath(slow_path);
4462
4463 LocationSummary* locations = instruction->GetLocations();
4464 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004465
4466 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004467 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004468 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004469 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004470 } else {
4471 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004472 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004473 __ jmp(slow_path->GetEntryLabel());
4474 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004475 }
4476 __ j(kEqual, slow_path->GetEntryLabel());
4477}
4478
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004479void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004480 if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004481 GenerateImplicitNullCheck(instruction);
4482 } else {
4483 GenerateExplicitNullCheck(instruction);
4484 }
4485}
4486
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004487void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004488 bool object_array_get_with_read_barrier =
4489 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004490 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004491 new (GetGraph()->GetArena()) LocationSummary(instruction,
4492 object_array_get_with_read_barrier ?
4493 LocationSummary::kCallOnSlowPath :
4494 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004495 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004496 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004497 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4498 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4499 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004500 // The output overlaps for an object array get when read barriers
4501 // are enabled: we do not want the move to overwrite the array's
4502 // location, as we need it to emit the read barrier.
4503 locations->SetOut(
4504 Location::RequiresRegister(),
4505 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004506 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004507 // We need a temporary register for the read barrier marking slow
4508 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4509 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4510 locations->AddTemp(Location::RequiresRegister());
4511 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004512}
4513
4514void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4515 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004516 Location obj_loc = locations->InAt(0);
4517 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004518 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004519 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004520
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004521 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004522 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004523 case Primitive::kPrimBoolean: {
4524 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004525 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004526 if (index.IsConstant()) {
4527 __ movzxb(out, Address(obj,
4528 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4529 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004530 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004531 }
4532 break;
4533 }
4534
4535 case Primitive::kPrimByte: {
4536 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004537 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004538 if (index.IsConstant()) {
4539 __ movsxb(out, Address(obj,
4540 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4541 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004542 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004543 }
4544 break;
4545 }
4546
4547 case Primitive::kPrimShort: {
4548 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004549 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004550 if (index.IsConstant()) {
4551 __ movsxw(out, Address(obj,
4552 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4553 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004554 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004555 }
4556 break;
4557 }
4558
4559 case Primitive::kPrimChar: {
4560 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004561 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004562 if (index.IsConstant()) {
4563 __ movzxw(out, Address(obj,
4564 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4565 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004566 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004567 }
4568 break;
4569 }
4570
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004571 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004572 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004573 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004574 if (index.IsConstant()) {
4575 __ movl(out, Address(obj,
4576 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4577 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004578 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004579 }
4580 break;
4581 }
4582
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004583 case Primitive::kPrimNot: {
4584 static_assert(
4585 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4586 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4587 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4588 // /* HeapReference<Object> */ out =
4589 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4590 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4591 Location temp = locations->GetTemp(0);
4592 // Note that a potential implicit null check is handled in this
4593        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4594 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4595 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4596 } else {
4597 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4598 if (index.IsConstant()) {
4599 uint32_t offset =
4600 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4601 __ movl(out, Address(obj, offset));
4602 codegen_->MaybeRecordImplicitNullCheck(instruction);
4603 // If read barriers are enabled, emit read barriers other than
4604 // Baker's using a slow path (and also unpoison the loaded
4605 // reference, if heap poisoning is enabled).
4606 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4607 } else {
4608 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4609 codegen_->MaybeRecordImplicitNullCheck(instruction);
4610 // If read barriers are enabled, emit read barriers other than
4611 // Baker's using a slow path (and also unpoison the loaded
4612 // reference, if heap poisoning is enabled).
4613 codegen_->MaybeGenerateReadBarrierSlow(
4614 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4615 }
4616 }
4617 break;
4618 }
4619
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004620 case Primitive::kPrimLong: {
4621 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004622 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004623 if (index.IsConstant()) {
4624 __ movq(out, Address(obj,
4625 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4626 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004627 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004628 }
4629 break;
4630 }
4631
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004632 case Primitive::kPrimFloat: {
4633 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004634 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004635 if (index.IsConstant()) {
4636 __ movss(out, Address(obj,
4637 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4638 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004639 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004640 }
4641 break;
4642 }
4643
4644 case Primitive::kPrimDouble: {
4645 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004646 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004647 if (index.IsConstant()) {
4648 __ movsd(out, Address(obj,
4649 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4650 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004651 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004652 }
4653 break;
4654 }
4655
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004656 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004657 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004658 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004659 }
Roland Levillain4d027112015-07-01 15:41:14 +01004660
4661 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004662 // Potential implicit null checks, in the case of reference
4663 // arrays, are handled in the previous switch statement.
4664 } else {
4665 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004666 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004667}
4668
4669void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004670 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004671
4672 bool needs_write_barrier =
4673 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004674 bool may_need_runtime_call = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004675 bool object_array_set_with_read_barrier =
4676 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004677
Nicolas Geoffray39468442014-09-02 15:17:15 +01004678 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004679 instruction,
Roland Levillain0d5a2812015-11-13 10:07:31 +00004680 (may_need_runtime_call || object_array_set_with_read_barrier) ?
4681 LocationSummary::kCallOnSlowPath :
4682 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004683
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004684 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004685 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4686 if (Primitive::IsFloatingPointType(value_type)) {
4687 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004688 } else {
4689 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4690 }
4691
4692 if (needs_write_barrier) {
4693 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004694
4695 // This first temporary register is possibly used for heap
4696 // reference poisoning and/or read barrier emission too.
4697 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004698 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004699 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004700}
4701
4702void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4703 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004704 Location array_loc = locations->InAt(0);
4705 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004706 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004707 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004708 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004709 bool may_need_runtime_call = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004710 bool needs_write_barrier =
4711 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004712 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4713 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4714 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004715
4716 switch (value_type) {
4717 case Primitive::kPrimBoolean:
4718 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004719 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4720 Address address = index.IsConstant()
4721 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4722 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4723 if (value.IsRegister()) {
4724 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004725 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004726 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004727 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004728 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004729 break;
4730 }
4731
4732 case Primitive::kPrimShort:
4733 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004734 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4735 Address address = index.IsConstant()
4736 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4737 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4738 if (value.IsRegister()) {
4739 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004740 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004741 DCHECK(value.IsConstant()) << value;
4742 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004743 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004744 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004745 break;
4746 }
4747
4748 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004749 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4750 Address address = index.IsConstant()
4751 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4752 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004753
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004754 if (!value.IsRegister()) {
4755 // Just setting null.
4756 DCHECK(instruction->InputAt(2)->IsNullConstant());
4757 DCHECK(value.IsConstant()) << value;
4758 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004759 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004760 DCHECK(!needs_write_barrier);
4761 DCHECK(!may_need_runtime_call);
4762 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004763 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004764
4765 DCHECK(needs_write_barrier);
4766 CpuRegister register_value = value.AsRegister<CpuRegister>();
4767 NearLabel done, not_null, do_put;
4768 SlowPathCode* slow_path = nullptr;
4769 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4770 if (may_need_runtime_call) {
4771 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4772 codegen_->AddSlowPath(slow_path);
4773 if (instruction->GetValueCanBeNull()) {
4774 __ testl(register_value, register_value);
4775 __ j(kNotEqual, &not_null);
4776 __ movl(address, Immediate(0));
4777 codegen_->MaybeRecordImplicitNullCheck(instruction);
4778 __ jmp(&done);
4779 __ Bind(&not_null);
4780 }
4781
Roland Levillain0d5a2812015-11-13 10:07:31 +00004782 if (kEmitCompilerReadBarrier) {
4783 // When read barriers are enabled, the type checking
4784 // instrumentation requires two read barriers:
4785 //
4786 // __ movl(temp2, temp);
4787 // // /* HeapReference<Class> */ temp = temp->component_type_
4788 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004789 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004790 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4791 //
4792 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4793 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004794 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004795 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4796 //
4797 // __ cmpl(temp, temp2);
4798 //
4799 // However, the second read barrier may trash `temp`, as it
4800 // is a temporary register, and as such would not be saved
4801 // along with live registers before calling the runtime (nor
4802 // restored afterwards). So in this case, we bail out and
4803 // delegate the work to the array set slow path.
4804 //
4805 // TODO: Extend the register allocator to support a new
4806 // "(locally) live temp" location so as to avoid always
4807 // going into the slow path when read barriers are enabled.
4808 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004809 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004810 // /* HeapReference<Class> */ temp = array->klass_
4811 __ movl(temp, Address(array, class_offset));
4812 codegen_->MaybeRecordImplicitNullCheck(instruction);
4813 __ MaybeUnpoisonHeapReference(temp);
4814
4815 // /* HeapReference<Class> */ temp = temp->component_type_
4816 __ movl(temp, Address(temp, component_offset));
4817 // If heap poisoning is enabled, no need to unpoison `temp`
4818 // nor the object reference in `register_value->klass`, as
4819 // we are comparing two poisoned references.
4820 __ cmpl(temp, Address(register_value, class_offset));
4821
4822 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4823 __ j(kEqual, &do_put);
4824 // If heap poisoning is enabled, the `temp` reference has
4825 // not been unpoisoned yet; unpoison it now.
4826 __ MaybeUnpoisonHeapReference(temp);
4827
4828 // /* HeapReference<Class> */ temp = temp->super_class_
4829 __ movl(temp, Address(temp, super_offset));
4830 // If heap poisoning is enabled, no need to unpoison
4831 // `temp`, as we are comparing against null below.
4832 __ testl(temp, temp);
4833 __ j(kNotEqual, slow_path->GetEntryLabel());
4834 __ Bind(&do_put);
4835 } else {
4836 __ j(kNotEqual, slow_path->GetEntryLabel());
4837 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004838 }
4839 }
4840
4841 if (kPoisonHeapReferences) {
4842 __ movl(temp, register_value);
4843 __ PoisonHeapReference(temp);
4844 __ movl(address, temp);
4845 } else {
4846 __ movl(address, register_value);
4847 }
4848 if (!may_need_runtime_call) {
4849 codegen_->MaybeRecordImplicitNullCheck(instruction);
4850 }
4851
4852 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4853 codegen_->MarkGCCard(
4854 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4855 __ Bind(&done);
4856
4857 if (slow_path != nullptr) {
4858 __ Bind(slow_path->GetExitLabel());
4859 }
4860
4861 break;
4862 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004863
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004864 case Primitive::kPrimInt: {
4865 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4866 Address address = index.IsConstant()
4867 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4868 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4869 if (value.IsRegister()) {
4870 __ movl(address, value.AsRegister<CpuRegister>());
4871 } else {
4872 DCHECK(value.IsConstant()) << value;
4873 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4874 __ movl(address, Immediate(v));
4875 }
4876 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004877 break;
4878 }
4879
4880 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004881 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4882 Address address = index.IsConstant()
4883 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4884 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4885 if (value.IsRegister()) {
4886 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004887 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004888 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004889 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
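        // There is no 64-bit-immediate store to memory on x86-64, so the constant may need to
        // be written as two 32-bit halves; address_high points at the upper half and
        // MoveInt64ToAddress is expected to pick the one- or two-store sequence accordingly.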
Mark Mendellea5af682015-10-22 17:35:49 -04004890 Address address_high = index.IsConstant()
4891 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4892 offset + sizeof(int32_t))
4893 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4894 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004895 }
4896 break;
4897 }
4898
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004899 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004900 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4901 Address address = index.IsConstant()
4902 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4903 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004904 if (value.IsFpuRegister()) {
4905 __ movss(address, value.AsFpuRegister<XmmRegister>());
4906 } else {
4907 DCHECK(value.IsConstant());
4908 int32_t v =
4909 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4910 __ movl(address, Immediate(v));
4911 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004912 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004913 break;
4914 }
4915
4916 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004917 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4918 Address address = index.IsConstant()
4919 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4920 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004921 if (value.IsFpuRegister()) {
4922 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4923 codegen_->MaybeRecordImplicitNullCheck(instruction);
4924 } else {
4925 int64_t v =
4926 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4927 Address address_high = index.IsConstant()
4928 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4929 offset + sizeof(int32_t))
4930 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4931 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4932 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004933 break;
4934 }
4935
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004936 case Primitive::kPrimVoid:
4937 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004938 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004939 }
4940}
4941
4942void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004943 LocationSummary* locations =
4944 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004945 locations->SetInAt(0, Location::RequiresRegister());
4946 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004947}
4948
4949void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
4950 LocationSummary* locations = instruction->GetLocations();
4951 uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004952 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4953 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004954 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004955 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004956}
4957
4958void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004959 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4960 ? LocationSummary::kCallOnSlowPath
4961 : LocationSummary::kNoCall;
4962 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004963 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04004964 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004965 if (instruction->HasUses()) {
4966 locations->SetOut(Location::SameAsFirstInput());
4967 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004968}
4969
4970void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
4971 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05004972 Location index_loc = locations->InAt(0);
4973 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07004974 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004975 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004976
Mark Mendell99dbd682015-04-22 16:18:52 -04004977 if (length_loc.IsConstant()) {
4978 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
4979 if (index_loc.IsConstant()) {
4980        // BCE will remove the bounds check if we are guaranteed to pass.
4981 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
4982 if (index < 0 || index >= length) {
4983 codegen_->AddSlowPath(slow_path);
4984 __ jmp(slow_path->GetEntryLabel());
4985 } else {
4986 // Some optimization after BCE may have generated this, and we should not
4987        // generate a bounds check, as the index is known to be in a valid range.
4988 }
4989 return;
4990 }
4991
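    // The comparisons below use unsigned conditions (kAboveEqual here, kBelowEqual in the
    // register-length case), so a negative index is seen as a very large unsigned value and a
    // single branch covers both index < 0 and index >= length.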
4992 // We have to reverse the jump condition because the length is the constant.
4993 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
4994 __ cmpl(index_reg, Immediate(length));
4995 codegen_->AddSlowPath(slow_path);
4996 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05004997 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04004998 CpuRegister length = length_loc.AsRegister<CpuRegister>();
4999 if (index_loc.IsConstant()) {
5000 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5001 __ cmpl(length, Immediate(value));
5002 } else {
5003 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5004 }
5005 codegen_->AddSlowPath(slow_path);
5006 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005007 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005008}
5009
5010void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5011 CpuRegister card,
5012 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005013 CpuRegister value,
5014 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005015 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005016 if (value_can_be_null) {
5017 __ testl(value, value);
5018 __ j(kEqual, &is_null);
5019 }
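  // Card marking sketch: the card table address is read from the current thread (hence the gs
  // prefix), the object address shifted right by kCardShift selects the card, and the byte
  // store below dirties it.  Writing the low byte of `card` itself relies on the runtime
  // biasing the card table base so that its least significant byte equals the dirty-card
  // value; that is an assumption about the runtime setup, not something enforced here.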
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005020 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5021 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005022 __ movq(temp, object);
5023 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005024 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005025 if (value_can_be_null) {
5026 __ Bind(&is_null);
5027 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005028}
5029
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005030void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
5031 temp->SetLocations(nullptr);
5032}
5033
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005034void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp ATTRIBUTE_UNUSED) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005035 // Nothing to do, this is driven by the code generator.
5036}
5037
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005038void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005039 LOG(FATAL) << "Unimplemented";
5040}
5041
5042void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005043 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5044}
5045
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005046void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5047 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5048}
5049
5050void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005051 HBasicBlock* block = instruction->GetBlock();
5052 if (block->GetLoopInformation() != nullptr) {
5053 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5054 // The back edge will generate the suspend check.
5055 return;
5056 }
5057 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5058 // The goto will generate the suspend check.
5059 return;
5060 }
5061 GenerateSuspendCheck(instruction, nullptr);
5062}
5063
5064void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5065 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005066 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005067 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5068 if (slow_path == nullptr) {
5069 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5070 instruction->SetSlowPath(slow_path);
5071 codegen_->AddSlowPath(slow_path);
5072 if (successor != nullptr) {
5073 DCHECK(successor->IsLoopHeader());
5074 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5075 }
5076 } else {
5077 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5078 }
5079
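  // The 16-bit thread flags are read from the Thread object through gs:.  A non-zero value
  // typically means a suspend or checkpoint request is pending and the slow path is taken;
  // when a successor block is known the polarity is inverted below so the fast path jumps
  // straight to it.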
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005080 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5081 /* no_rip */ true),
5082 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005083 if (successor == nullptr) {
5084 __ j(kNotEqual, slow_path->GetEntryLabel());
5085 __ Bind(slow_path->GetReturnLabel());
5086 } else {
5087 __ j(kEqual, codegen_->GetLabelOf(successor));
5088 __ jmp(slow_path->GetEntryLabel());
5089 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005090}
5091
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005092X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5093 return codegen_->GetAssembler();
5094}
5095
5096void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005097 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005098 Location source = move->GetSource();
5099 Location destination = move->GetDestination();
5100
5101 if (source.IsRegister()) {
5102 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005103 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005104 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005105 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005106 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005107 } else {
5108 DCHECK(destination.IsDoubleStackSlot());
5109 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005110 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005111 }
5112 } else if (source.IsStackSlot()) {
5113 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005114 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005115 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005116 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005117 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005118 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005119 } else {
5120 DCHECK(destination.IsStackSlot());
5121 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5122 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5123 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005124 } else if (source.IsDoubleStackSlot()) {
5125 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005126 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005127 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005128 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005129 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5130 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005131 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005132 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005133 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5134 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5135 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005136 } else if (source.IsConstant()) {
5137 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005138 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5139 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005140 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005141 if (value == 0) {
5142 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5143 } else {
5144 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5145 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005146 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005147 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005148 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005149 }
5150 } else if (constant->IsLongConstant()) {
5151 int64_t value = constant->AsLongConstant()->GetValue();
5152 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005153 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005154 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005155 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005156 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005157 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005158 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005159 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005160 int32_t value = bit_cast<int32_t, float>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005161 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005162 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5163 if (value == 0) {
5164 // easy FP 0.0.
5165 __ xorps(dest, dest);
5166 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005167 __ movss(dest, codegen_->LiteralFloatAddress(fp_value));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005168 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005169 } else {
5170 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell92e83bf2015-05-07 11:25:03 -04005171 Immediate imm(value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005172 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5173 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005174 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005175 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005176 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005177 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005178 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005179 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5180 if (value == 0) {
5181 __ xorpd(dest, dest);
5182 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005183 __ movsd(dest, codegen_->LiteralDoubleAddress(fp_value));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005184 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005185 } else {
5186 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005187 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005188 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005189 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005190 } else if (source.IsFpuRegister()) {
5191 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005192 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005193 } else if (destination.IsStackSlot()) {
5194 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005195 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005196 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005197 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005198 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005199 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005200 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005201 }
5202}
5203
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005204void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005205 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005206 __ movl(Address(CpuRegister(RSP), mem), reg);
5207 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005208}
5209
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005210void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005211 ScratchRegisterScope ensure_scratch(
5212 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5213
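  // If the scratch register had to be spilled, SpillScratch pushed it and RSP moved down by
  // one word, so the offsets below are adjusted by stack_offset to keep addressing the
  // original slots.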
5214 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5215 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5216 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5217 Address(CpuRegister(RSP), mem2 + stack_offset));
5218 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5219 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5220 CpuRegister(ensure_scratch.GetRegister()));
5221}
5222
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005223void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5224 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5225 __ movq(Address(CpuRegister(RSP), mem), reg);
5226 __ movq(reg, CpuRegister(TMP));
5227}
5228
5229void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5230 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005231 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005232
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005233 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5234 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5235 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5236 Address(CpuRegister(RSP), mem2 + stack_offset));
5237 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5238 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5239 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005240}
5241
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005242void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5243 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5244 __ movss(Address(CpuRegister(RSP), mem), reg);
5245 __ movd(reg, CpuRegister(TMP));
5246}
5247
5248void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5249 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5250 __ movsd(Address(CpuRegister(RSP), mem), reg);
5251 __ movd(reg, CpuRegister(TMP));
5252}
5253
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005254void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005255 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005256 Location source = move->GetSource();
5257 Location destination = move->GetDestination();
5258
5259 if (source.IsRegister() && destination.IsRegister()) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005260 __ xchgq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005261 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005262 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005263 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005264 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005265 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005266 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5267 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005268 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005269 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005270 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005271 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5272 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005273 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
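    // There is no exchange instruction for XMM registers, so the swap goes through the
    // integer TMP register.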
Roland Levillain271ab9c2014-11-27 15:23:57 +00005274 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5275 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5276 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005277 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005278 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005279 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005280 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005281 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005282 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005283 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005284 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005285 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005286 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005287 }
5288}
5289
5290
5291void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5292 __ pushq(CpuRegister(reg));
5293}
5294
5295
5296void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5297 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005298}
5299
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005300void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005301 SlowPathCode* slow_path, CpuRegister class_reg) {
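  // A status strictly below kStatusInitialized branches to the slow path; an initialized
  // class compares greater-or-equal and simply falls through.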
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005302 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5303 Immediate(mirror::Class::kStatusInitialized));
5304 __ j(kLess, slow_path->GetEntryLabel());
5305 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005306 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005307}
5308
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005309void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005310 InvokeRuntimeCallingConvention calling_convention;
5311 CodeGenerator::CreateLoadClassLocationSummary(
5312 cls,
5313 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005314 Location::RegisterLocation(RAX),
5315 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005316}
5317
5318void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005319 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005320 if (cls->NeedsAccessCheck()) {
5321 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5322 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5323 cls,
5324 cls->GetDexPc(),
5325 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005326 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005327 return;
5328 }
5329
Roland Levillain0d5a2812015-11-13 10:07:31 +00005330 Location out_loc = locations->Out();
5331 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005332 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005333
Calin Juravle580b6092015-10-06 17:35:58 +01005334 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005335 DCHECK(!cls->CanCallRuntime());
5336 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005337 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5338 GenerateGcRootFieldLoad(
5339 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005340 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005341 // /* GcRoot<mirror::Class>[] */ out =
5342 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5343 __ movq(out, Address(current_method,
5344 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005345 // /* GcRoot<mirror::Class> */ out = out[type_index]
5346 GenerateGcRootFieldLoad(cls, out_loc, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Roland Levillain4d027112015-07-01 15:41:14 +01005347
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005348 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5349 DCHECK(cls->CanCallRuntime());
5350 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5351 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5352 codegen_->AddSlowPath(slow_path);
5353 if (!cls->IsInDexCache()) {
5354 __ testl(out, out);
5355 __ j(kEqual, slow_path->GetEntryLabel());
5356 }
5357 if (cls->MustGenerateClinitCheck()) {
5358 GenerateClassInitializationCheck(slow_path, out);
5359 } else {
5360 __ Bind(slow_path->GetExitLabel());
5361 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005362 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005363 }
5364}
5365
5366void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5367 LocationSummary* locations =
5368 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5369 locations->SetInAt(0, Location::RequiresRegister());
5370 if (check->HasUses()) {
5371 locations->SetOut(Location::SameAsFirstInput());
5372 }
5373}
5374
5375void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005376 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005377 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005378 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005379 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005380 GenerateClassInitializationCheck(slow_path,
5381 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005382}
5383
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005384void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005385 LocationSummary::CallKind call_kind = (!load->IsInDexCache() || kEmitCompilerReadBarrier)
5386 ? LocationSummary::kCallOnSlowPath
5387 : LocationSummary::kNoCall;
5388 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005389 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005390 locations->SetOut(Location::RequiresRegister());
5391}
5392
5393void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005394 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005395 Location out_loc = locations->Out();
5396 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005397 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005398
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005399 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5400 GenerateGcRootFieldLoad(
5401 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005402 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5403 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005404 // /* GcRoot<mirror::String> */ out = out[string_index]
5405 GenerateGcRootFieldLoad(
5406 load, out_loc, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005407
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005408 if (!load->IsInDexCache()) {
5409 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5410 codegen_->AddSlowPath(slow_path);
5411 __ testl(out, out);
5412 __ j(kEqual, slow_path->GetEntryLabel());
5413 __ Bind(slow_path->GetExitLabel());
5414 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005415}
5416
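// Returns the address of the current thread's pending-exception field. On x86-64,
// ART reaches thread-local state through the GS segment register, which is why the
// accesses below are emitted with the gs() prefix.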
David Brazdilcb1c0552015-08-04 16:22:25 +01005417static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005418 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5419 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005420}
5421
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005422void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5423 LocationSummary* locations =
5424 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5425 locations->SetOut(Location::RequiresRegister());
5426}
5427
5428void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005429 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5430}
5431
5432void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5433 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5434}
5435
5436void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5437 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005438}
5439
5440void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5441 LocationSummary* locations =
5442 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5443 InvokeRuntimeCallingConvention calling_convention;
5444 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5445}
5446
5447void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005448 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5449 instruction,
5450 instruction->GetDexPc(),
5451 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005452 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005453}
5454
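// With read barriers, the type checks below need an extra register: it holds the
// lock word in the Baker fast path, or preserves a reference that would otherwise be
// clobbered before the slow-path barrier runs (see the GenerateReferenceLoad*
// helpers further down).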
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005455static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5456 return kEmitCompilerReadBarrier &&
5457 (kUseBakerReadBarrier ||
5458 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5459 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5460 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5461}
5462
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005463void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005464 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005465 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5466 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005467 case TypeCheckKind::kExactCheck:
5468 case TypeCheckKind::kAbstractClassCheck:
5469 case TypeCheckKind::kClassHierarchyCheck:
5470 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005471 call_kind =
5472 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005473 break;
5474 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005475 case TypeCheckKind::kUnresolvedCheck:
5476 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005477 call_kind = LocationSummary::kCallOnSlowPath;
5478 break;
5479 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005480
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005481 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005482 locations->SetInAt(0, Location::RequiresRegister());
5483 locations->SetInAt(1, Location::Any());
5484 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5485 locations->SetOut(Location::RequiresRegister());
5486 // When read barriers are enabled, we need a temporary register for
5487 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005488 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005489 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005490 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005491}
5492
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005493void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005494 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005495 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005496 Location obj_loc = locations->InAt(0);
5497 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005498 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005499 Location out_loc = locations->Out();
5500 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005501 Location temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
5502 locations->GetTemp(0) :
5503 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005504 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005505 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5506 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5507 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005508 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005509 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005510
5511 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005512 // Avoid null check if we know obj is not null.
5513 if (instruction->MustDoNullCheck()) {
5514 __ testl(obj, obj);
5515 __ j(kEqual, &zero);
5516 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005517
Roland Levillain0d5a2812015-11-13 10:07:31 +00005518 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005519 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005520
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005521 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005522 case TypeCheckKind::kExactCheck: {
5523 if (cls.IsRegister()) {
5524 __ cmpl(out, cls.AsRegister<CpuRegister>());
5525 } else {
5526 DCHECK(cls.IsStackSlot()) << cls;
5527 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5528 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005529 if (zero.IsLinked()) {
5530 // Classes must be equal for the instanceof to succeed.
5531 __ j(kNotEqual, &zero);
5532 __ movl(out, Immediate(1));
5533 __ jmp(&done);
5534 } else {
5535 __ setcc(kEqual, out);
5536 // setcc only sets the low byte.
5537 __ andl(out, Immediate(1));
5538 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005539 break;
5540 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005541
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005542 case TypeCheckKind::kAbstractClassCheck: {
5543 // If the class is abstract, we eagerly fetch the super class of the
5544 // object to avoid doing a comparison we know will fail.
5545 NearLabel loop, success;
5546 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005547 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005548 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005549 __ testl(out, out);
5550 // If `out` is null, we use it for the result, and jump to `done`.
5551 __ j(kEqual, &done);
5552 if (cls.IsRegister()) {
5553 __ cmpl(out, cls.AsRegister<CpuRegister>());
5554 } else {
5555 DCHECK(cls.IsStackSlot()) << cls;
5556 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5557 }
5558 __ j(kNotEqual, &loop);
5559 __ movl(out, Immediate(1));
5560 if (zero.IsLinked()) {
5561 __ jmp(&done);
5562 }
5563 break;
5564 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005565
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005566 case TypeCheckKind::kClassHierarchyCheck: {
5567 // Walk over the class hierarchy to find a match.
5568 NearLabel loop, success;
5569 __ Bind(&loop);
5570 if (cls.IsRegister()) {
5571 __ cmpl(out, cls.AsRegister<CpuRegister>());
5572 } else {
5573 DCHECK(cls.IsStackSlot()) << cls;
5574 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5575 }
5576 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005577 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005578 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005579 __ testl(out, out);
5580 __ j(kNotEqual, &loop);
5581 // If `out` is null, we use it for the result, and jump to `done`.
5582 __ jmp(&done);
5583 __ Bind(&success);
5584 __ movl(out, Immediate(1));
5585 if (zero.IsLinked()) {
5586 __ jmp(&done);
5587 }
5588 break;
5589 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005590
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005591 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005592 // Do an exact check.
5593 NearLabel exact_check;
5594 if (cls.IsRegister()) {
5595 __ cmpl(out, cls.AsRegister<CpuRegister>());
5596 } else {
5597 DCHECK(cls.IsStackSlot()) << cls;
5598 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5599 }
5600 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005601 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005602 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005603 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005604 __ testl(out, out);
5605 // If `out` is null, we use it for the result, and jump to `done`.
5606 __ j(kEqual, &done);
5607 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5608 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005609 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005610 __ movl(out, Immediate(1));
5611 __ jmp(&done);
5612 break;
5613 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005614
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005615 case TypeCheckKind::kArrayCheck: {
5616 if (cls.IsRegister()) {
5617 __ cmpl(out, cls.AsRegister<CpuRegister>());
5618 } else {
5619 DCHECK(cls.IsStackSlot()) << cls;
5620 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5621 }
5622 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005623 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5624 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005625 codegen_->AddSlowPath(slow_path);
5626 __ j(kNotEqual, slow_path->GetEntryLabel());
5627 __ movl(out, Immediate(1));
5628 if (zero.IsLinked()) {
5629 __ jmp(&done);
5630 }
5631 break;
5632 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005633
Calin Juravle98893e12015-10-02 21:05:03 +01005634 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005635 case TypeCheckKind::kInterfaceCheck: {
5636 // Note that we indeed only call on slow path, but we always go
5637 // into the slow path for the unresolved & interface check
5638 // cases.
5639 //
5640 // We cannot directly call the InstanceofNonTrivial runtime
5641 // entry point without resorting to a type checking slow path
5642 // here (i.e. by calling InvokeRuntime directly), as it would
5643 // require to assign fixed registers for the inputs of this
5644      // require assigning fixed registers for the inputs of this
5645 // convention), which might be cluttered by the potential first
5646 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005647 //
5648 // TODO: Introduce a new runtime entry point taking the object
5649 // to test (instead of its class) as argument, and let it deal
5650 // with the read barrier issues. This will let us refactor this
5651 // case of the `switch` code as it was previously (with a direct
5652 // call to the runtime not using a type checking slow path).
5653 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005654 DCHECK(locations->OnlyCallsOnSlowPath());
5655 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5656 /* is_fatal */ false);
5657 codegen_->AddSlowPath(slow_path);
5658 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005659 if (zero.IsLinked()) {
5660 __ jmp(&done);
5661 }
5662 break;
5663 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005664 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005665
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005666 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005667 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005668 __ xorl(out, out);
5669 }
5670
5671 if (done.IsLinked()) {
5672 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005673 }
5674
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005675 if (slow_path != nullptr) {
5676 __ Bind(slow_path->GetExitLabel());
5677 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005678}
5679
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005680void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005681 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5682 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005683 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5684 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005685 case TypeCheckKind::kExactCheck:
5686 case TypeCheckKind::kAbstractClassCheck:
5687 case TypeCheckKind::kClassHierarchyCheck:
5688 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005689 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5690 LocationSummary::kCallOnSlowPath :
5691 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005692 break;
5693 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005694 case TypeCheckKind::kUnresolvedCheck:
5695 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005696 call_kind = LocationSummary::kCallOnSlowPath;
5697 break;
5698 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005699 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5700 locations->SetInAt(0, Location::RequiresRegister());
5701 locations->SetInAt(1, Location::Any());
5702 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5703 locations->AddTemp(Location::RequiresRegister());
5704 // When read barriers are enabled, we need an additional temporary
5705 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005706 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005707 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005708 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005709}
5710
5711void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005712 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005713 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005714 Location obj_loc = locations->InAt(0);
5715 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005716 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005717 Location temp_loc = locations->GetTemp(0);
5718 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005719 Location temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
5720 locations->GetTemp(1) :
5721 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005722 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5723 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5724 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5725 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005726
Roland Levillain0d5a2812015-11-13 10:07:31 +00005727 bool is_type_check_slow_path_fatal =
5728 (type_check_kind == TypeCheckKind::kExactCheck ||
5729 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5730 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5731 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5732 !instruction->CanThrowIntoCatchBlock();
5733 SlowPathCode* type_check_slow_path =
5734 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5735 is_type_check_slow_path_fatal);
5736 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005737
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005738 Label done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005739 // Avoid null check if we know obj is not null.
5740 if (instruction->MustDoNullCheck()) {
5741 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005742 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005743 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005744
Roland Levillain0d5a2812015-11-13 10:07:31 +00005745 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005746 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005747
Roland Levillain0d5a2812015-11-13 10:07:31 +00005748 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005749 case TypeCheckKind::kExactCheck:
5750 case TypeCheckKind::kArrayCheck: {
5751 if (cls.IsRegister()) {
5752 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5753 } else {
5754 DCHECK(cls.IsStackSlot()) << cls;
5755 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5756 }
5757 // Jump to slow path for throwing the exception or doing a
5758 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005759 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005760 break;
5761 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005762
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005763 case TypeCheckKind::kAbstractClassCheck: {
5764 // If the class is abstract, we eagerly fetch the super class of the
5765 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005766 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005767 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005768 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005769 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005770
5771 // If the class reference currently in `temp` is not null, jump
5772 // to the `compare_classes` label to compare it with the checked
5773 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005774 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005775 __ j(kNotEqual, &compare_classes);
5776 // Otherwise, jump to the slow path to throw the exception.
5777 //
5778 // But before, move back the object's class into `temp` before
5779 // going into the slow path, as it has been overwritten in the
5780 // meantime.
5781 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005782 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005783 __ jmp(type_check_slow_path->GetEntryLabel());
5784
5785 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005786 if (cls.IsRegister()) {
5787 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5788 } else {
5789 DCHECK(cls.IsStackSlot()) << cls;
5790 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5791 }
5792 __ j(kNotEqual, &loop);
5793 break;
5794 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005795
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005796 case TypeCheckKind::kClassHierarchyCheck: {
5797 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005798 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005799 __ Bind(&loop);
5800 if (cls.IsRegister()) {
5801 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5802 } else {
5803 DCHECK(cls.IsStackSlot()) << cls;
5804 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5805 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005806 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005807
Roland Levillain0d5a2812015-11-13 10:07:31 +00005808 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005809 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005810
5811 // If the class reference currently in `temp` is not null, jump
5812      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005813 __ testl(temp, temp);
5814 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005815 // Otherwise, jump to the slow path to throw the exception.
5816 //
5817 // But before, move back the object's class into `temp` before
5818 // going into the slow path, as it has been overwritten in the
5819 // meantime.
5820 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005821 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005822 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005823 break;
5824 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005825
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005826 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005827 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005828 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005829 if (cls.IsRegister()) {
5830 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5831 } else {
5832 DCHECK(cls.IsStackSlot()) << cls;
5833 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5834 }
5835 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005836
5837 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005838 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005839 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840
5841 // If the component type is not null (i.e. the object is indeed
5842 // an array), jump to label `check_non_primitive_component_type`
5843 // to further check that this component type is not a primitive
5844 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005845 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005846 __ j(kNotEqual, &check_non_primitive_component_type);
5847 // Otherwise, jump to the slow path to throw the exception.
5848 //
5849 // But before, move back the object's class into `temp` before
5850 // going into the slow path, as it has been overwritten in the
5851 // meantime.
5852 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005853 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005854 __ jmp(type_check_slow_path->GetEntryLabel());
5855
5856 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005857 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005858 __ j(kEqual, &done);
5859 // Same comment as above regarding `temp` and the slow path.
5860 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005861 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005862 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005863 break;
5864 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005865
Calin Juravle98893e12015-10-02 21:05:03 +01005866 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005867 case TypeCheckKind::kInterfaceCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005868 // We always go into the type check slow path for the unresolved &
5869 // interface check cases.
5870 //
5871 // We cannot directly call the CheckCast runtime entry point
5872 // without resorting to a type checking slow path here (i.e. by
5873 // calling InvokeRuntime directly), as it would require to
5874      // calling InvokeRuntime directly), as it would require
5875      // assigning fixed registers for the inputs of this HCheckCast
5876 // might be cluttered by the potential first read barrier
5877 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005878 //
5879 // TODO: Introduce a new runtime entry point taking the object
5880 // to test (instead of its class) as argument, and let it deal
5881 // with the read barrier issues. This will let us refactor this
5882 // case of the `switch` code as it was previously (with a direct
5883 // call to the runtime not using a type checking slow path).
5884 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005885 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005886 break;
5887 }
5888 __ Bind(&done);
5889
Roland Levillain0d5a2812015-11-13 10:07:31 +00005890 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005891}
5892
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005893void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5894 LocationSummary* locations =
5895 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5896 InvokeRuntimeCallingConvention calling_convention;
5897 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5898}
5899
5900void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005901 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
5902 : QUICK_ENTRY_POINT(pUnlockObject),
5903 instruction,
5904 instruction->GetDexPc(),
5905 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005906 if (instruction->IsEnter()) {
5907 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5908 } else {
5909 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5910 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005911}
5912
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005913void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
5914void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
5915void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
5916
5917void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5918 LocationSummary* locations =
5919 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5920 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5921 || instruction->GetResultType() == Primitive::kPrimLong);
5922 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005923 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005924 locations->SetOut(Location::SameAsFirstInput());
5925}
5926
5927void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
5928 HandleBitwiseOperation(instruction);
5929}
5930
5931void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
5932 HandleBitwiseOperation(instruction);
5933}
5934
5935void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
5936 HandleBitwiseOperation(instruction);
5937}
5938
5939void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5940 LocationSummary* locations = instruction->GetLocations();
5941 Location first = locations->InAt(0);
5942 Location second = locations->InAt(1);
5943 DCHECK(first.Equals(locations->Out()));
5944
5945 if (instruction->GetResultType() == Primitive::kPrimInt) {
5946 if (second.IsRegister()) {
5947 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005948 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005949 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005950 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005951 } else {
5952 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005953 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005954 }
5955 } else if (second.IsConstant()) {
5956 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
5957 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005958 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005959 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005960 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005961 } else {
5962 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005963 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005964 }
5965 } else {
5966 Address address(CpuRegister(RSP), second.GetStackIndex());
5967 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005968 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005969 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005970 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005971 } else {
5972 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005973 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005974 }
5975 }
5976 } else {
5977 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005978 CpuRegister first_reg = first.AsRegister<CpuRegister>();
5979 bool second_is_constant = false;
5980 int64_t value = 0;
5981 if (second.IsConstant()) {
5982 second_is_constant = true;
5983 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005984 }
Mark Mendell40741f32015-04-20 22:10:34 -04005985 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005986
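    // Note: andq/orq/xorq only accept 32-bit sign-extended immediates, so wider
    // constants are loaded RIP-relative from the constant area instead.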
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005987 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005988 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04005989 if (is_int32_value) {
5990 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
5991 } else {
5992 __ andq(first_reg, codegen_->LiteralInt64Address(value));
5993 }
5994 } else if (second.IsDoubleStackSlot()) {
5995 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005996 } else {
5997 __ andq(first_reg, second.AsRegister<CpuRegister>());
5998 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005999 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006000 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006001 if (is_int32_value) {
6002 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6003 } else {
6004 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6005 }
6006 } else if (second.IsDoubleStackSlot()) {
6007 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006008 } else {
6009 __ orq(first_reg, second.AsRegister<CpuRegister>());
6010 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006011 } else {
6012 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006013 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006014 if (is_int32_value) {
6015 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6016 } else {
6017 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6018 }
6019 } else if (second.IsDoubleStackSlot()) {
6020 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006021 } else {
6022 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6023 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006024 }
6025 }
6026}
6027
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006028void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6029 Location out,
6030 uint32_t offset,
6031 Location temp) {
6032 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6033 if (kEmitCompilerReadBarrier) {
6034 if (kUseBakerReadBarrier) {
6035 // Load with fast path based Baker's read barrier.
6036 // /* HeapReference<Object> */ out = *(out + offset)
6037 codegen_->GenerateFieldLoadWithBakerReadBarrier(
6038 instruction, out, out_reg, offset, temp, /* needs_null_check */ false);
6039 } else {
6040 // Load with slow path based read barrier.
6041 // Save the value of `out` into `temp` before overwriting it
6042 // in the following move operation, as we will need it for the
6043 // read barrier below.
6044 __ movl(temp.AsRegister<CpuRegister>(), out_reg);
6045 // /* HeapReference<Object> */ out = *(out + offset)
6046 __ movl(out_reg, Address(out_reg, offset));
6047 codegen_->GenerateReadBarrierSlow(instruction, out, out, temp, offset);
6048 }
6049 } else {
6050 // Plain load with no read barrier.
6051 // /* HeapReference<Object> */ out = *(out + offset)
6052 __ movl(out_reg, Address(out_reg, offset));
6053 __ MaybeUnpoisonHeapReference(out_reg);
6054 }
6055}
6056
6057void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6058 Location out,
6059 Location obj,
6060 uint32_t offset,
6061 Location temp) {
6062 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6063 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6064 if (kEmitCompilerReadBarrier) {
6065 if (kUseBakerReadBarrier) {
6066 // Load with fast path based Baker's read barrier.
6067 // /* HeapReference<Object> */ out = *(obj + offset)
6068 codegen_->GenerateFieldLoadWithBakerReadBarrier(
6069 instruction, out, obj_reg, offset, temp, /* needs_null_check */ false);
6070 } else {
6071 // Load with slow path based read barrier.
6072 // /* HeapReference<Object> */ out = *(obj + offset)
6073 __ movl(out_reg, Address(obj_reg, offset));
6074 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6075 }
6076 } else {
6077 // Plain load with no read barrier.
6078 // /* HeapReference<Object> */ out = *(obj + offset)
6079 __ movl(out_reg, Address(obj_reg, offset));
6080 __ MaybeUnpoisonHeapReference(out_reg);
6081 }
6082}
6083
6084void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6085 Location root,
6086 CpuRegister obj,
6087 uint32_t offset) {
6088 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6089 if (kEmitCompilerReadBarrier) {
6090 if (kUseBakerReadBarrier) {
6091 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6092      // Baker's read barriers are used:
6093 //
6094 // root = obj.field;
6095 // if (Thread::Current()->GetIsGcMarking()) {
6096 // root = ReadBarrier::Mark(root)
6097 // }
6098
6099 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6100 __ movl(root_reg, Address(obj, offset));
6101 static_assert(
6102 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6103 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6104 "have different sizes.");
6105 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6106 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6107 "have different sizes.");
6108
6109 // Slow path used to mark the GC root `root`.
6110 SlowPathCode* slow_path =
6111 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6112 codegen_->AddSlowPath(slow_path);
6113
6114 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6115 /* no_rip */ true),
6116 Immediate(0));
6117 __ j(kNotEqual, slow_path->GetEntryLabel());
6118 __ Bind(slow_path->GetExitLabel());
6119 } else {
6120 // GC root loaded through a slow path for read barriers other
6121 // than Baker's.
6122 // /* GcRoot<mirror::Object>* */ root = obj + offset
6123 __ leaq(root_reg, Address(obj, offset));
6124 // /* mirror::Object* */ root = root->Read()
6125 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6126 }
6127 } else {
6128 // Plain GC root load with no read barrier.
6129 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6130 __ movl(root_reg, Address(obj, offset));
6131 }
6132}
6133
6134void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6135 Location ref,
6136 CpuRegister obj,
6137 uint32_t offset,
6138 Location temp,
6139 bool needs_null_check) {
6140 DCHECK(kEmitCompilerReadBarrier);
6141 DCHECK(kUseBakerReadBarrier);
6142
6143 // /* HeapReference<Object> */ ref = *(obj + offset)
6144 Address src(obj, offset);
6145 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6146}
6147
6148void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6149 Location ref,
6150 CpuRegister obj,
6151 uint32_t data_offset,
6152 Location index,
6153 Location temp,
6154 bool needs_null_check) {
6155 DCHECK(kEmitCompilerReadBarrier);
6156 DCHECK(kUseBakerReadBarrier);
6157
6158 // /* HeapReference<Object> */ ref =
6159 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
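  // TIMES_4 is the scale-factor enum (numeric value 2), so the shift below scales a
  // constant index by sizeof(HeapReference<Object>) == 4, matching the scaled-index
  // addressing used for the register case.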
6160 Address src = index.IsConstant() ?
6161 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6162 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6163 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6164}
6165
6166void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6167 Location ref,
6168 CpuRegister obj,
6169 const Address& src,
6170 Location temp,
6171 bool needs_null_check) {
6172 DCHECK(kEmitCompilerReadBarrier);
6173 DCHECK(kUseBakerReadBarrier);
6174
6175 // In slow path based read barriers, the read barrier call is
6176 // inserted after the original load. However, in fast path based
6177 // Baker's read barriers, we need to perform the load of
6178 // mirror::Object::monitor_ *before* the original reference load.
6179 // This load-load ordering is required by the read barrier.
6180 // The fast path/slow path (for Baker's algorithm) should look like:
6181 //
6182  // uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
6183 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6184 // HeapReference<Object> ref = *src; // Original reference load.
6185 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6186 // if (is_gray) {
6187 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6188 // }
6189 //
6190 // Note: the original implementation in ReadBarrier::Barrier is
6191 // slightly more complex as:
6192 // - it implements the load-load fence using a data dependency on
6193 // the high-bits of rb_state, which are expected to be all zeroes;
6194 // - it performs additional checks that we do not do here for
6195 // performance reasons.
6196
6197 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6198 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6199 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6200
6201 // /* int32_t */ monitor = obj->monitor_
6202 __ movl(temp_reg, Address(obj, monitor_offset));
6203 if (needs_null_check) {
6204 MaybeRecordImplicitNullCheck(instruction);
6205 }
6206 // /* LockWord */ lock_word = LockWord(monitor)
6207 static_assert(sizeof(LockWord) == sizeof(int32_t),
6208 "art::LockWord and int32_t have different sizes.");
6209 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6210 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6211 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6212 static_assert(
6213 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6214 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6215
6216 // Load fence to prevent load-load reordering.
6217 // Note that this is a no-op, thanks to the x86-64 memory model.
6218 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6219
6220 // The actual reference load.
6221 // /* HeapReference<Object> */ ref = *src
6222 __ movl(ref_reg, src);
6223
6224 // Object* ref = ref_addr->AsMirrorPtr()
6225 __ MaybeUnpoisonHeapReference(ref_reg);
6226
6227 // Slow path used to mark the object `ref` when it is gray.
6228 SlowPathCode* slow_path =
6229 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6230 AddSlowPath(slow_path);
6231
6232 // if (rb_state == ReadBarrier::gray_ptr_)
6233 // ref = ReadBarrier::Mark(ref);
6234 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6235 __ j(kEqual, slow_path->GetEntryLabel());
6236 __ Bind(slow_path->GetExitLabel());
6237}
6238
6239void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6240 Location out,
6241 Location ref,
6242 Location obj,
6243 uint32_t offset,
6244 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006245 DCHECK(kEmitCompilerReadBarrier);
6246
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006247 // Insert a slow path based read barrier *after* the reference load.
6248 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006249 // If heap poisoning is enabled, the unpoisoning of the loaded
6250 // reference will be carried out by the runtime within the slow
6251 // path.
6252 //
6253 // Note that `ref` currently does not get unpoisoned (when heap
6254 // poisoning is enabled), which is alright as the `ref` argument is
6255 // not used by the artReadBarrierSlow entry point.
6256 //
6257 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6258 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6259 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6260 AddSlowPath(slow_path);
6261
Roland Levillain0d5a2812015-11-13 10:07:31 +00006262 __ jmp(slow_path->GetEntryLabel());
6263 __ Bind(slow_path->GetExitLabel());
6264}
6265
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006266void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6267 Location out,
6268 Location ref,
6269 Location obj,
6270 uint32_t offset,
6271 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006272 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006273 // Baker's read barriers shall be handled by the fast path
6274 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6275 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006276 // If heap poisoning is enabled, unpoisoning will be taken care of
6277 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006278 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006279 } else if (kPoisonHeapReferences) {
6280 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6281 }
6282}
6283
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006284void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6285 Location out,
6286 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006287 DCHECK(kEmitCompilerReadBarrier);
6288
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006289 // Insert a slow path based read barrier *after* the GC root load.
6290 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006291 // Note that GC roots are not affected by heap poisoning, so we do
6292 // not need to do anything special for this here.
6293 SlowPathCode* slow_path =
6294 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6295 AddSlowPath(slow_path);
6296
Roland Levillain0d5a2812015-11-13 10:07:31 +00006297 __ jmp(slow_path->GetEntryLabel());
6298 __ Bind(slow_path->GetExitLabel());
6299}
6300
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006301void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006302 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006303 LOG(FATAL) << "Unreachable";
6304}
6305
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006306void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006307 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006308 LOG(FATAL) << "Unreachable";
6309}
6310
Nicolas Geoffray2e7cd752015-07-10 11:38:52 +01006311void LocationsBuilderX86_64::VisitFakeString(HFakeString* instruction) {
6312 DCHECK(codegen_->IsBaseline());
6313 LocationSummary* locations =
6314 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6315 locations->SetOut(Location::ConstantLocation(GetGraph()->GetNullConstant()));
6316}
6317
6318void InstructionCodeGeneratorX86_64::VisitFakeString(HFakeString* instruction ATTRIBUTE_UNUSED) {
6319 DCHECK(codegen_->IsBaseline());
6320 // Will be generated at use site.
6321}
6322
Mark Mendellfe57faa2015-09-18 09:26:15 -04006323// Simple implementation of packed switch - generate cascaded compare/jumps.
6324void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6325 LocationSummary* locations =
6326 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6327 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006328 locations->AddTemp(Location::RequiresRegister());
6329 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006330}
6331
6332void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6333 int32_t lower_bound = switch_instr->GetStartValue();
Nicolas Geoffrayb4c13762015-12-16 12:06:39 +00006334 int32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006335 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04006336 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
6337 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
6338 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
6339
6340 // Remove the bias, if needed.
6341 Register value_reg_out = value_reg_in.AsRegister();
6342 if (lower_bound != 0) {
6343 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
6344 value_reg_out = temp_reg.AsRegister();
6345 }
6346 CpuRegister value_reg(value_reg_out);
6347
6348 // Is the value in range?
Nicolas Geoffrayb4c13762015-12-16 12:06:39 +00006349 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
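  // After the bias is removed, values below lower_bound are negative and, viewed as
  // unsigned by the kAbove branch, compare above num_entries - 1, so this single
  // compare bounds the value on both ends.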
Mark Mendell9c86b482015-09-18 13:36:07 -04006350 __ cmpl(value_reg, Immediate(num_entries - 1));
6351 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006352
Mark Mendell9c86b482015-09-18 13:36:07 -04006353 // We are in the range of the table.
6354 // Load the address of the jump table in the constant area.
6355 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04006356
Mark Mendell9c86b482015-09-18 13:36:07 -04006357 // Load the (signed) offset from the jump table.
6358 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
6359
6360 // Add the offset to the address of the table base.
6361 __ addq(temp_reg, base_reg);
6362
6363 // And jump.
6364 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006365}
6366
Mark Mendell92e83bf2015-05-07 11:25:03 -04006367void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6368 if (value == 0) {
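    // xorl is the shortest way to zero the register and also breaks any dependency
    // on its previous value.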
6369 __ xorl(dest, dest);
6370 } else if (value > 0 && IsInt<32>(value)) {
6371 // We can use a 32 bit move, as it will zero-extend and is one byte shorter.
6372 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6373 } else {
6374 __ movq(dest, Immediate(value));
6375 }
6376}
6377
Mark Mendellcfa410b2015-05-25 16:02:44 -04006378void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6379 DCHECK(dest.IsDoubleStackSlot());
6380 if (IsInt<32>(value)) {
6381 // Can move directly as an int32 constant.
6382 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6383 Immediate(static_cast<int32_t>(value)));
6384 } else {
6385 Load64BitValue(CpuRegister(TMP), value);
6386 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6387 }
6388}
6389
Mark Mendell9c86b482015-09-18 13:36:07 -04006390/**
6391 * Class to handle late fixup of offsets into constant area.
6392 */
6393class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6394 public:
6395 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6396 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6397
6398 protected:
6399 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6400
6401 CodeGeneratorX86_64* codegen_;
6402
6403 private:
6404 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6405 // Patch the correct offset for the instruction. We use the address of the
6406 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6407 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6408 int32_t relative_position = constant_offset - pos;
6409
6410 // Patch in the right value.
6411 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6412 }
6413
6414 // Location in constant area that the fixup refers to.
6415 size_t offset_into_constant_area_;
6416};
6417
6418/**
6419 * Class to handle late fixup of offsets to a jump table that will be created in the
6420 * constant area.
6421 */
6422class JumpTableRIPFixup : public RIPFixup {
6423 public:
6424 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6425 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6426
6427 void CreateJumpTable() {
6428 X86_64Assembler* assembler = codegen_->GetAssembler();
6429
6430 // Ensure that the reference to the jump table has the correct offset.
6431 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6432 SetOffset(offset_in_constant_table);
6433
6434 // Compute the offset from the start of the function to this jump table.
6435 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6436
6437 // Populate the jump table with the correct values for the jump table.
6438 int32_t num_entries = switch_instr_->GetNumEntries();
6439 HBasicBlock* block = switch_instr_->GetBlock();
6440 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6441 // The value that we want is the target offset - the position of the table.
6442 for (int32_t i = 0; i < num_entries; i++) {
6443 HBasicBlock* b = successors[i];
6444 Label* l = codegen_->GetLabelOf(b);
6445 DCHECK(l->IsBound());
6446 int32_t offset_to_block = l->Position() - current_table_offset;
6447 assembler->AppendInt32(offset_to_block);
6448 }
6449 }
6450
6451 private:
6452 const HPackedSwitch* switch_instr_;
6453};
6454
Mark Mendellf55c3e02015-03-26 21:07:46 -04006455void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6456 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006457 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006458 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6459 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006460 assembler->Align(4, 0);
6461 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006462
6463 // Populate any jump tables.
6464 for (auto jump_table : fixups_to_jump_tables_) {
6465 jump_table->CreateJumpTable();
6466 }
6467
6468 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006469 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006470 }
6471
6472 // And finish up.
6473 CodeGenerator::Finalize(allocator);
6474}
6475
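// The helpers below hand out RIP-relative addresses into the constant area. Their
// 32-bit displacements are initially unknown and are patched by RIPFixup::Process
// once Finalize() has placed the constant area at the end of the generated code.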
Mark Mendellf55c3e02015-03-26 21:07:46 -04006476Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6477 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6478 return Address::RIP(fixup);
6479}
6480
6481Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6482 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6483 return Address::RIP(fixup);
6484}
6485
6486Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6487 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6488 return Address::RIP(fixup);
6489}
6490
6491Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6492 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6493 return Address::RIP(fixup);
6494}
6495
Andreas Gampe85b62f22015-09-09 13:15:38 -07006496// TODO: trg as memory.
6497void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6498 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006499 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006500 return;
6501 }
6502
6503 DCHECK_NE(type, Primitive::kPrimVoid);
6504
6505 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6506 if (trg.Equals(return_loc)) {
6507 return;
6508 }
6509
6510 // Let the parallel move resolver take care of all of this.
6511 HParallelMove parallel_move(GetGraph()->GetArena());
6512 parallel_move.AddMove(return_loc, trg, type, nullptr);
6513 GetMoveResolver()->EmitNativeCode(&parallel_move);
6514}
6515
Mark Mendell9c86b482015-09-18 13:36:07 -04006516Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6517 // Create a fixup to be used to create and address the jump table.
6518 JumpTableRIPFixup* table_fixup =
6519 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6520
6521 // We have to populate the jump tables.
6522 fixups_to_jump_tables_.push_back(table_fixup);
6523 return Address::RIP(table_fixup);
6524}
6525
Mark Mendellea5af682015-10-22 17:35:49 -04006526void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
6527 const Address& addr_high,
6528 int64_t v,
6529 HInstruction* instruction) {
6530 if (IsInt<32>(v)) {
6531 int32_t v_32 = v;
6532 __ movq(addr_low, Immediate(v_32));
6533 MaybeRecordImplicitNullCheck(instruction);
6534 } else {
6535    // Didn't fit in a 32-bit immediate. Do it in pieces.
6536 int32_t low_v = Low32Bits(v);
6537 int32_t high_v = High32Bits(v);
6538 __ movl(addr_low, Immediate(low_v));
6539 MaybeRecordImplicitNullCheck(instruction);
6540 __ movl(addr_high, Immediate(high_v));
6541 }
6542}
6543
Roland Levillain4d027112015-07-01 15:41:14 +01006544#undef __
6545
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006546} // namespace x86_64
6547} // namespace art