/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
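// Illustration: at the threshold of 5 entries the compare/jump sequence costs roughly
// 1.5 * 5 ~= 8 instructions, while a jump table costs 7 instructions plus 5 literals, so
// switches below this size are better served by the compare/jump sequence.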

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

static constexpr int kC2ConditionMask = 0x400;
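// Note: 0x400 is bit 10 of the x87 FPU status word, i.e. the C2 condition flag, which
// fprem/fprem1 leave set while the partial remainder still needs another iteration; this
// mask is presumably what the floating-point remainder loop tests.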

#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

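// Note: the slow path below handles the overflowing "minimum value divided by -1" case.
// The expected quotient is the dividend itself (e.g. INT32_MIN / -1 == INT32_MIN, since
// negating the minimum two's-complement value wraps back to itself) and the expected
// remainder is 0, which is what the neg/xor sequences compute.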
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(Register reg, Primitive::Type type, bool is_div)
      : cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowArrayBounds),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  HBoundsCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
            Immediate(instruction_->GetStringIndex()));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : instruction_(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  HInstruction* const instruction_;
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  HDeoptimize* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location out, Location obj)
      : instruction_(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), obj_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : instruction_(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  HInstruction* const instruction_;
  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : instruction_(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  HInstruction* const instruction_;
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
#define __ down_cast<X86_64Assembler*>(GetAssembler())->

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
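// Note: unsigned-style condition codes (below/above) are used for floating-point compares
// because ucomiss/ucomisd set CF/ZF the way an unsigned integer comparison would, so the
// signed kLess/kGreater encodings would not test the right flags.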
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      pc_relative_dex_cache_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                                  invoke->GetDexCacheArrayOffset());
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind the label at the end of the "movl" insn.
      __ Bind(&pc_relative_dex_cache_patches_.back().label);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache]
      uint32_t index_in_cache = invoke->GetTargetMethod().dex_method_index;
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64WordSize).SizeValue()));
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}

size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}

size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}

void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
  RecordPcInfo(instruction, dex_pc, slow_path);
}

static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100999InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1000 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001001 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001002 assembler_(codegen->GetAssembler()),
1003 codegen_(codegen) {}
1004
David Brazdil58282f42016-01-14 12:45:10 +00001005void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001006 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001007 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001008
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001009 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001010 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001011}
1012
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001013static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001014 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001015}
David Srbecky9d8606d2015-04-12 09:35:32 +01001016
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001017static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001018 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001019}
1020
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001021void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001022 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001023 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001024 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001025 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001026 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001027
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001028 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001029 __ testq(CpuRegister(RAX), Address(
1030 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001031 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001032 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001033
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001034 if (HasEmptyFrame()) {
1035 return;
1036 }
1037
Nicolas Geoffray98893962015-01-21 12:32:32 +00001038 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001039 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001040 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001041 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001042 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1043 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001044 }
1045 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001046
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001047 int adjust = GetFrameSize() - GetCoreSpillSize();
1048 __ subq(CpuRegister(RSP), Immediate(adjust));
1049 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001050 uint32_t xmm_spill_location = GetFpuSpillStart();
1051 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001052
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001053 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1054 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001055 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1056 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1057 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001058 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001059 }
1060
Mathieu Chartiere401d142015-04-22 13:56:20 -07001061 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001062 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001063}
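// Illustrative sketch (a hypothetical method, not emitted verbatim): for a non-leaf
// method that allocates one core callee-save and one XMM callee-save, the prologue
// above emits roughly:
//   testq rax, [rsp - <stack overflow reserved bytes>]   // implicit overflow check, may fault
//   pushq <core callee-save>
//   subq  rsp, <frame size - core spill size>
//   movsd [rsp + <xmm spill offset>], <xmm callee-save>
//   movq  [rsp], <method register>                       // ArtMethod* at the bottom of the frame
// with a CFI directive recorded after each push, stack adjustment and spill so the
// unwinder can locate the callee-saves at any point in the method.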
1064
1065void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001066 __ cfi().RememberState();
1067 if (!HasEmptyFrame()) {
1068 uint32_t xmm_spill_location = GetFpuSpillStart();
1069 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1070 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1071 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1072 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1073 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1074 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1075 }
1076 }
1077
1078 int adjust = GetFrameSize() - GetCoreSpillSize();
1079 __ addq(CpuRegister(RSP), Immediate(adjust));
1080 __ cfi().AdjustCFAOffset(-adjust);
1081
1082 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1083 Register reg = kCoreCalleeSaves[i];
1084 if (allocated_registers_.ContainsCoreRegister(reg)) {
1085 __ popq(CpuRegister(reg));
1086 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1087 __ cfi().Restore(DWARFReg(reg));
1088 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001089 }
1090 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001091 __ ret();
1092 __ cfi().RestoreState();
1093 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001094}
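// The epilogue mirrors the prologue in reverse: reload the spilled XMM registers, pop
// the frame adjustment, pop the core callee-saves, then ret. The RememberState() /
// RestoreState() pair brackets the sequence so that the CFI for code emitted after
// this exit (a method can have several return blocks sharing one frame layout) still
// describes the full frame. Continuing the sketch from GenerateFrameEntry, roughly:
//   movsd <xmm callee-save>, [rsp + <xmm spill offset>]
//   addq  rsp, <frame size - core spill size>
//   popq  <core callee-save>
//   ret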
1095
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001096void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1097 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001098}
1099
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001100Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
1101 switch (load->GetType()) {
1102 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001103 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001104 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001105
1106 case Primitive::kPrimInt:
1107 case Primitive::kPrimNot:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001108 case Primitive::kPrimFloat:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001109 return Location::StackSlot(GetStackSlot(load->GetLocal()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001110
1111 case Primitive::kPrimBoolean:
1112 case Primitive::kPrimByte:
1113 case Primitive::kPrimChar:
1114 case Primitive::kPrimShort:
1115 case Primitive::kPrimVoid:
1116 LOG(FATAL) << "Unexpected type " << load->GetType();
Andreas Gampe65b798e2015-04-06 09:35:22 -07001117 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001118 }
1119
1120 LOG(FATAL) << "Unreachable";
Andreas Gampe65b798e2015-04-06 09:35:22 -07001121 UNREACHABLE();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001122}
1123
1124void CodeGeneratorX86_64::Move(Location destination, Location source) {
1125 if (source.Equals(destination)) {
1126 return;
1127 }
1128 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001129 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001130 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001131 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001132 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001133 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001134 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001135 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1136 } else if (source.IsConstant()) {
1137 HConstant* constant = source.GetConstant();
1138 if (constant->IsLongConstant()) {
1139 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1140 } else {
1141 Load32BitValue(dest, GetInt32ValueOf(constant));
1142 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001143 } else {
1144 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001145 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001146 }
1147 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001148 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001149 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001150 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001151 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001152 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1153 } else if (source.IsConstant()) {
1154 HConstant* constant = source.GetConstant();
1155 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1156 if (constant->IsFloatConstant()) {
1157 Load32BitValue(dest, static_cast<int32_t>(value));
1158 } else {
1159 Load64BitValue(dest, value);
1160 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001161 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001162 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001163 } else {
1164 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001165 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001166 }
1167 } else if (destination.IsStackSlot()) {
1168 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001169 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001170 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001171 } else if (source.IsFpuRegister()) {
1172 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001173 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001174 } else if (source.IsConstant()) {
1175 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001176 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001177 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001178 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001179 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001180 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1181 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001182 }
1183 } else {
1184 DCHECK(destination.IsDoubleStackSlot());
1185 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001186 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001187 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001188 } else if (source.IsFpuRegister()) {
1189 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001190 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001191 } else if (source.IsConstant()) {
1192 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001193 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001194 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001195 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001196 } else {
1197 DCHECK(constant->IsLongConstant());
1198 value = constant->AsLongConstant()->GetValue();
1199 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001200 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001201 } else {
1202 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001203 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1204 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001205 }
1206 }
1207}
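// Quick summary of the instruction selection in Move() above (a sketch, not exhaustive):
//   - gp reg destination:  movq (from reg), movd (from xmm), movl/movq (from stack),
//                          Load32BitValue/Load64BitValue (from constants)
//   - xmm destination:     movd (from reg), movaps (from xmm), movss/movsd (from stack)
//   - stack destination:   movl/movq (from reg), movss/movsd (from xmm), movl imm or
//                          Store64BitValueToStack (from constants)
// Stack-to-stack moves go through the reserved TMP register, since x86-64 has no
// memory-to-memory move.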
1208
Calin Juravle175dc732015-08-25 15:42:32 +01001209void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1210 DCHECK(location.IsRegister());
1211 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1212}
1213
Calin Juravlee460d1d2015-09-29 04:52:17 +01001214void CodeGeneratorX86_64::MoveLocation(
1215 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1216 Move(dst, src);
1217}
1218
1219void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1220 if (location.IsRegister()) {
1221 locations->AddTemp(location);
1222 } else {
1223 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1224 }
1225}
1226
David Brazdilfc6a86a2015-06-26 10:33:45 +00001227void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001228 DCHECK(!successor->IsExitBlock());
1229
1230 HBasicBlock* block = got->GetBlock();
1231 HInstruction* previous = got->GetPrevious();
1232
1233 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001234 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001235 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1236 return;
1237 }
1238
1239 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1240 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1241 }
1242 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001243 __ jmp(codegen_->GetLabelOf(successor));
1244 }
1245}
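// When the goto is a loop back edge that carries a HSuspendCheck, the suspend check
// (the thread's suspension / safepoint poll) is generated here instead of a plain
// jump, and it is responsible for reaching the successor itself (hence the early
// return with no jmp emitted here). Otherwise the jmp is emitted only when the
// successor is not the next block in the linear order, so straight-line control
// flow costs nothing.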
1246
David Brazdilfc6a86a2015-06-26 10:33:45 +00001247void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1248 got->SetLocations(nullptr);
1249}
1250
1251void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1252 HandleGoto(got, got->GetSuccessor());
1253}
1254
1255void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1256 try_boundary->SetLocations(nullptr);
1257}
1258
1259void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1260 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1261 if (!successor->IsExitBlock()) {
1262 HandleGoto(try_boundary, successor);
1263 }
1264}
1265
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001266void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1267 exit->SetLocations(nullptr);
1268}
1269
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001270void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001271}
1272
Mark Mendell152408f2015-12-31 12:28:50 -05001273template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001274void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001275 LabelType* true_label,
1276 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001277 if (cond->IsFPConditionTrueIfNaN()) {
1278 __ j(kUnordered, true_label);
1279 } else if (cond->IsFPConditionFalseIfNaN()) {
1280 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001281 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001282 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001283}
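// ucomiss/ucomisd signal an unordered result (at least one operand is NaN) through
// the parity flag, so the NaN outcome has to be routed explicitly before the ordered
// jump. For example, for a '<' whose bias sends NaN to the false branch, the two j()
// calls above come out roughly as:
//   jp  false_label    // unordered (NaN) -> false
//   jb  true_label     // ordered and below -> true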
1284
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001285void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001286 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001287
Mark Mendellc4701932015-04-10 13:18:51 -04001288 Location left = locations->InAt(0);
1289 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001290 Primitive::Type type = condition->InputAt(0)->GetType();
1291 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001292 case Primitive::kPrimBoolean:
1293 case Primitive::kPrimByte:
1294 case Primitive::kPrimChar:
1295 case Primitive::kPrimShort:
1296 case Primitive::kPrimInt:
1297 case Primitive::kPrimNot: {
1298 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1299 if (right.IsConstant()) {
1300 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1301 if (value == 0) {
1302 __ testl(left_reg, left_reg);
1303 } else {
1304 __ cmpl(left_reg, Immediate(value));
1305 }
1306 } else if (right.IsStackSlot()) {
1307 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1308 } else {
1309 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1310 }
1311 break;
1312 }
Mark Mendellc4701932015-04-10 13:18:51 -04001313 case Primitive::kPrimLong: {
1314 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1315 if (right.IsConstant()) {
1316 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001317 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001318 } else if (right.IsDoubleStackSlot()) {
1319 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1320 } else {
1321 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1322 }
Mark Mendellc4701932015-04-10 13:18:51 -04001323 break;
1324 }
1325 case Primitive::kPrimFloat: {
1326 if (right.IsFpuRegister()) {
1327 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1328 } else if (right.IsConstant()) {
1329 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1330 codegen_->LiteralFloatAddress(
1331 right.GetConstant()->AsFloatConstant()->GetValue()));
1332 } else {
1333 DCHECK(right.IsStackSlot());
1334 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1335 Address(CpuRegister(RSP), right.GetStackIndex()));
1336 }
Mark Mendellc4701932015-04-10 13:18:51 -04001337 break;
1338 }
1339 case Primitive::kPrimDouble: {
1340 if (right.IsFpuRegister()) {
1341 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1342 } else if (right.IsConstant()) {
1343 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1344 codegen_->LiteralDoubleAddress(
1345 right.GetConstant()->AsDoubleConstant()->GetValue()));
1346 } else {
1347 DCHECK(right.IsDoubleStackSlot());
1348 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1349 Address(CpuRegister(RSP), right.GetStackIndex()));
1350 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001351 break;
1352 }
1353 default:
1354 LOG(FATAL) << "Unexpected condition type " << type;
1355 }
1356}
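// Notes on the selection above: a comparison against the constant 0 is emitted as
// testl reg, reg (equivalent flags for this case, shorter encoding than cmpl with an
// immediate); long immediates go through codegen_->Compare64BitValue rather than a
// raw cmpq immediate; and FP constants are compared straight against their
// constant-area slot via LiteralFloatAddress / LiteralDoubleAddress instead of being
// loaded into an XMM register first.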
1357
1358template<class LabelType>
1359void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1360 LabelType* true_target_in,
1361 LabelType* false_target_in) {
1362 // Generated branching requires both targets to be explicit. If either of the
1363  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1364 LabelType fallthrough_target;
1365 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1366 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1367
1368 // Generate the comparison to set the CC.
1369 GenerateCompareTest(condition);
1370
1371 // Now generate the correct jump(s).
1372 Primitive::Type type = condition->InputAt(0)->GetType();
1373 switch (type) {
1374 case Primitive::kPrimLong: {
1375 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1376 break;
1377 }
1378 case Primitive::kPrimFloat: {
1379 GenerateFPJumps(condition, true_target, false_target);
1380 break;
1381 }
1382 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001383 GenerateFPJumps(condition, true_target, false_target);
1384 break;
1385 }
1386 default:
1387 LOG(FATAL) << "Unexpected condition type " << type;
1388 }
1389
David Brazdil0debae72015-11-12 18:37:00 +00001390 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001391 __ jmp(false_target);
1392 }
David Brazdil0debae72015-11-12 18:37:00 +00001393
1394 if (fallthrough_target.IsLinked()) {
1395 __ Bind(&fallthrough_target);
1396 }
Mark Mendellc4701932015-04-10 13:18:51 -04001397}
1398
David Brazdil0debae72015-11-12 18:37:00 +00001399static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1400  // Moves may affect the eflags register (move zero uses xorl), so the eflags can
1401  // only be relied on when the condition is emitted immediately before `branch`.
1402  // Materialized FP conditions are excluded: materializing them requires branching.
1403 return cond->IsCondition() &&
1404 cond->GetNext() == branch &&
1405 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1406}
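// Example of what this check enables: for "if (a == b)" where the HEqual is
// materialized by the instruction immediately preceding the HIf, the branch can
// reuse the flags left by the materializing cmpl (setcc does not touch them) and
// skip the testl on the materialized boolean.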
1407
Mark Mendell152408f2015-12-31 12:28:50 -05001408template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001409void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001410 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001411 LabelType* true_target,
1412 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001413 HInstruction* cond = instruction->InputAt(condition_input_index);
1414
1415 if (true_target == nullptr && false_target == nullptr) {
1416 // Nothing to do. The code always falls through.
1417 return;
1418 } else if (cond->IsIntConstant()) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001419 // Constant condition, statically compared against 1.
David Brazdil0debae72015-11-12 18:37:00 +00001420 if (cond->AsIntConstant()->IsOne()) {
1421 if (true_target != nullptr) {
1422 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001423 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001424 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001425 DCHECK(cond->AsIntConstant()->IsZero());
1426 if (false_target != nullptr) {
1427 __ jmp(false_target);
1428 }
1429 }
1430 return;
1431 }
1432
1433 // The following code generates these patterns:
1434 // (1) true_target == nullptr && false_target != nullptr
1435 // - opposite condition true => branch to false_target
1436 // (2) true_target != nullptr && false_target == nullptr
1437 // - condition true => branch to true_target
1438 // (3) true_target != nullptr && false_target != nullptr
1439 // - condition true => branch to true_target
1440 // - branch to false_target
1441 if (IsBooleanValueOrMaterializedCondition(cond)) {
1442 if (AreEflagsSetFrom(cond, instruction)) {
1443 if (true_target == nullptr) {
1444 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1445 } else {
1446 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1447 }
1448 } else {
1449 // Materialized condition, compare against 0.
1450 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1451 if (lhs.IsRegister()) {
1452 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1453 } else {
1454 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1455 }
1456 if (true_target == nullptr) {
1457 __ j(kEqual, false_target);
1458 } else {
1459 __ j(kNotEqual, true_target);
1460 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001461 }
1462 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001463 // Condition has not been materialized, use its inputs as the
1464 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001465 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001466
David Brazdil0debae72015-11-12 18:37:00 +00001467 // If this is a long or FP comparison that has been folded into
1468 // the HCondition, generate the comparison directly.
1469 Primitive::Type type = condition->InputAt(0)->GetType();
1470 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1471 GenerateCompareTestAndBranch(condition, true_target, false_target);
1472 return;
1473 }
1474
1475 Location lhs = condition->GetLocations()->InAt(0);
1476 Location rhs = condition->GetLocations()->InAt(1);
1477 if (rhs.IsRegister()) {
1478 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1479 } else if (rhs.IsConstant()) {
1480 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001481 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001482 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001483 __ cmpl(lhs.AsRegister<CpuRegister>(),
1484 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1485 }
1486 if (true_target == nullptr) {
1487 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1488 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001489 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001490 }
Dave Allison20dfc792014-06-16 20:44:29 -07001491 }
David Brazdil0debae72015-11-12 18:37:00 +00001492
1493 // If neither branch falls through (case 3), the conditional branch to `true_target`
1494 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1495 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001496 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001497 }
1498}
1499
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001500void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001501 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1502 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001503 locations->SetInAt(0, Location::Any());
1504 }
1505}
1506
1507void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001508 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1509 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1510 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1511 nullptr : codegen_->GetLabelOf(true_successor);
1512 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1513 nullptr : codegen_->GetLabelOf(false_successor);
1514 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001515}
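// Passing nullptr for a target tells GenerateTestAndBranch that the corresponding
// successor is the very next block in the linear order, so no jump is needed for it.
// For example, "if (cond) { A } else { B }" with B laid out right after this block
// emits only the conditional jump to A's label and falls through into B (pattern (2)
// in GenerateTestAndBranch's comment).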
1516
1517void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1518 LocationSummary* locations = new (GetGraph()->GetArena())
1519 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001520 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001521 locations->SetInAt(0, Location::Any());
1522 }
1523}
1524
1525void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001526 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001527 GenerateTestAndBranch<Label>(deoptimize,
1528 /* condition_input_index */ 0,
1529 slow_path->GetEntryLabel(),
1530 /* false_target */ nullptr);
1531}
1532
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001533static bool SelectCanUseCMOV(HSelect* select) {
1534 // There are no conditional move instructions for XMMs.
1535 if (Primitive::IsFloatingPointType(select->GetType())) {
1536 return false;
1537 }
1538
1539  // An FP condition doesn't generate the single CC that we need.
1540 HInstruction* condition = select->GetCondition();
1541 if (condition->IsCondition() &&
1542 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1543 return false;
1544 }
1545
1546 // We can generate a CMOV for this Select.
1547 return true;
1548}
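// Two concrete examples of the rejections above: "c ? 1.0f : 2.0f" cannot use CMOV
// because there is no conditional move for XMM registers, and "x < y ? a : b" with
// float x and y cannot either, because the FP comparison needs its unordered (NaN)
// case handled with an extra branch rather than a single condition code.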
1549
David Brazdil74eb1b22015-12-14 11:44:01 +00001550void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1551 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1552 if (Primitive::IsFloatingPointType(select->GetType())) {
1553 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001554 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001555 } else {
1556 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001557 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001558 if (select->InputAt(1)->IsConstant()) {
1559 locations->SetInAt(1, Location::RequiresRegister());
1560 } else {
1561 locations->SetInAt(1, Location::Any());
1562 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001563 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001564 locations->SetInAt(1, Location::Any());
1565 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001566 }
1567 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1568 locations->SetInAt(2, Location::RequiresRegister());
1569 }
1570 locations->SetOut(Location::SameAsFirstInput());
1571}
1572
1573void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1574 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001575 if (SelectCanUseCMOV(select)) {
1576 // If both the condition and the source types are integer, we can generate
1577 // a CMOV to implement Select.
1578 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001579 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001580 DCHECK(locations->InAt(0).Equals(locations->Out()));
1581
1582 HInstruction* select_condition = select->GetCondition();
1583 Condition cond = kNotEqual;
1584
1585 // Figure out how to test the 'condition'.
1586 if (select_condition->IsCondition()) {
1587 HCondition* condition = select_condition->AsCondition();
1588 if (!condition->IsEmittedAtUseSite()) {
1589 // This was a previously materialized condition.
1590 // Can we use the existing condition code?
1591 if (AreEflagsSetFrom(condition, select)) {
1592 // Materialization was the previous instruction. Condition codes are right.
1593 cond = X86_64IntegerCondition(condition->GetCondition());
1594 } else {
1595 // No, we have to recreate the condition code.
1596 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1597 __ testl(cond_reg, cond_reg);
1598 }
1599 } else {
1600 GenerateCompareTest(condition);
1601 cond = X86_64IntegerCondition(condition->GetCondition());
1602 }
1603 } else {
1604 // Must be a boolean condition, which needs to be compared to 0.
1605 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1606 __ testl(cond_reg, cond_reg);
1607 }
1608
1609 // If the condition is true, overwrite the output, which already contains false.
1610  // Generate a correctly-sized CMOV (32- or 64-bit).
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001611 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1612 if (value_true_loc.IsRegister()) {
1613 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1614 } else {
1615 __ cmov(cond,
1616 value_false,
1617 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1618 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001619 } else {
1620 NearLabel false_target;
1621 GenerateTestAndBranch<NearLabel>(select,
1622 /* condition_input_index */ 2,
1623 /* true_target */ nullptr,
1624 &false_target);
1625 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1626 __ Bind(&false_target);
1627 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001628}
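// Rough shape of the CMOV path for "cond ? true_value : false_value" (the output
// register is required to already hold false_value, see SameAsFirstInput above):
//   <cmpl/cmpq or testl>            // put 'cond' into the flags
//   cmovCC out, true_value          // 32- or 64-bit form, from the select's type
// The fallback path instead branches around a single move:
//   j<!cond> false_target
//   <move true_value into out>      // out already holds false_value
//   false_target: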
1629
David Srbecky0cf44932015-12-09 14:09:59 +00001630void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1631 new (GetGraph()->GetArena()) LocationSummary(info);
1632}
1633
1634void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
David Srbeckyc7098ff2016-02-09 14:30:11 +00001635 codegen_->MaybeRecordNativeDebugInfo(info, info->GetDexPc());
1636}
1637
1638void CodeGeneratorX86_64::GenerateNop() {
1639 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001640}
1641
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001642void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
1643 local->SetLocations(nullptr);
1644}
1645
1646void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
1647 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
1648}
1649
1650void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
1651 local->SetLocations(nullptr);
1652}
1653
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001654void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001655 // Nothing to do, this is driven by the code generator.
1656}
1657
1658void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001659 LocationSummary* locations =
1660 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001661 switch (store->InputAt(1)->GetType()) {
1662 case Primitive::kPrimBoolean:
1663 case Primitive::kPrimByte:
1664 case Primitive::kPrimChar:
1665 case Primitive::kPrimShort:
1666 case Primitive::kPrimInt:
1667 case Primitive::kPrimNot:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001668 case Primitive::kPrimFloat:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001669 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
1670 break;
1671
1672 case Primitive::kPrimLong:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001673 case Primitive::kPrimDouble:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001674 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
1675 break;
1676
1677 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001678 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001679 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001680}
1681
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001682void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001683}
1684
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001685void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001686 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001687 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001688 // Handle the long/FP comparisons made in instruction simplification.
1689 switch (cond->InputAt(0)->GetType()) {
1690 case Primitive::kPrimLong:
1691 locations->SetInAt(0, Location::RequiresRegister());
1692 locations->SetInAt(1, Location::Any());
1693 break;
1694 case Primitive::kPrimFloat:
1695 case Primitive::kPrimDouble:
1696 locations->SetInAt(0, Location::RequiresFpuRegister());
1697 locations->SetInAt(1, Location::Any());
1698 break;
1699 default:
1700 locations->SetInAt(0, Location::RequiresRegister());
1701 locations->SetInAt(1, Location::Any());
1702 break;
1703 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001704 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001705 locations->SetOut(Location::RequiresRegister());
1706 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001707}
1708
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001709void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001710 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001711 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001712 }
Mark Mendellc4701932015-04-10 13:18:51 -04001713
1714 LocationSummary* locations = cond->GetLocations();
1715 Location lhs = locations->InAt(0);
1716 Location rhs = locations->InAt(1);
1717 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001718 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001719
1720 switch (cond->InputAt(0)->GetType()) {
1721 default:
1722 // Integer case.
1723
1724 // Clear output register: setcc only sets the low byte.
1725 __ xorl(reg, reg);
1726
1727 if (rhs.IsRegister()) {
1728 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1729 } else if (rhs.IsConstant()) {
1730 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001731 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001732 } else {
1733 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1734 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001735 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001736 return;
1737 case Primitive::kPrimLong:
1738 // Clear output register: setcc only sets the low byte.
1739 __ xorl(reg, reg);
1740
1741 if (rhs.IsRegister()) {
1742 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1743 } else if (rhs.IsConstant()) {
1744 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001745 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001746 } else {
1747 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1748 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001749 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001750 return;
1751 case Primitive::kPrimFloat: {
1752 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1753 if (rhs.IsConstant()) {
1754 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1755 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1756 } else if (rhs.IsStackSlot()) {
1757 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1758 } else {
1759 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1760 }
1761 GenerateFPJumps(cond, &true_label, &false_label);
1762 break;
1763 }
1764 case Primitive::kPrimDouble: {
1765 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1766 if (rhs.IsConstant()) {
1767 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1768 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1769 } else if (rhs.IsDoubleStackSlot()) {
1770 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1771 } else {
1772 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1773 }
1774 GenerateFPJumps(cond, &true_label, &false_label);
1775 break;
1776 }
1777 }
1778
1779 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001780 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001781
Roland Levillain4fa13f62015-07-06 18:11:54 +01001782 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001783 __ Bind(&false_label);
1784 __ xorl(reg, reg);
1785 __ jmp(&done_label);
1786
Roland Levillain4fa13f62015-07-06 18:11:54 +01001787 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001788 __ Bind(&true_label);
1789 __ movl(reg, Immediate(1));
1790 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001791}
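// Materialization summary for the code above: integer and long conditions become
//   xorl out, out; cmp ...; setCC out
// where the xorl clears the upper bits (setcc only writes the low byte) and must come
// before the compare because xorl itself clobbers the flags. FP conditions cannot be
// captured by a single setcc, so they reuse GenerateFPJumps plus the true/false/done
// tail emitted just above:
//   false_label: xorl out, out; jmp done_label
//   true_label:  movl out, 1
//   done_label: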
1792
1793void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001794 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001795}
1796
1797void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001798 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001799}
1800
1801void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001802 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001803}
1804
1805void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001806 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001807}
1808
1809void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001810 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001811}
1812
1813void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001814 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001815}
1816
1817void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001818 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001819}
1820
1821void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001822 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001823}
1824
1825void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001826 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001827}
1828
1829void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001830 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001831}
1832
1833void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001834 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001835}
1836
1837void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001838 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001839}
1840
Aart Bike9f37602015-10-09 11:15:55 -07001841void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001842 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001843}
1844
1845void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001846 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001847}
1848
1849void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001850 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001851}
1852
1853void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001854 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001855}
1856
1857void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001858 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001859}
1860
1861void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001862 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001863}
1864
1865void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001866 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001867}
1868
1869void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001870 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001871}
1872
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001873void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001874 LocationSummary* locations =
1875 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001876 switch (compare->InputAt(0)->GetType()) {
Aart Bika19616e2016-02-01 18:57:58 -08001877 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001878 case Primitive::kPrimLong: {
1879 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001880 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001881 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1882 break;
1883 }
1884 case Primitive::kPrimFloat:
1885 case Primitive::kPrimDouble: {
1886 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001887 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001888 locations->SetOut(Location::RequiresRegister());
1889 break;
1890 }
1891 default:
1892 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1893 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001894}
1895
1896void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001897 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001898 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001899 Location left = locations->InAt(0);
1900 Location right = locations->InAt(1);
1901
Mark Mendell0c9497d2015-08-21 09:30:05 -04001902 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001903 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001904 Condition less_cond = kLess;
1905
Calin Juravleddb7df22014-11-25 20:56:51 +00001906 switch (type) {
Aart Bika19616e2016-02-01 18:57:58 -08001907 case Primitive::kPrimInt: {
1908 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1909 if (right.IsConstant()) {
1910 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1911 codegen_->Compare32BitValue(left_reg, value);
1912 } else if (right.IsStackSlot()) {
1913 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1914 } else {
1915 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1916 }
1917 break;
1918 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001919 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001920 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1921 if (right.IsConstant()) {
1922 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001923 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001924 } else if (right.IsDoubleStackSlot()) {
1925 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001926 } else {
1927 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1928 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001929 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001930 }
1931 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001932 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1933 if (right.IsConstant()) {
1934 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1935 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1936 } else if (right.IsStackSlot()) {
1937 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1938 } else {
1939 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1940 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001941 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001942 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001943 break;
1944 }
1945 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001946 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1947 if (right.IsConstant()) {
1948 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1949 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1950 } else if (right.IsDoubleStackSlot()) {
1951 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1952 } else {
1953 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1954 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001955 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001956 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001957 break;
1958 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001959 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001960 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001961 }
Aart Bika19616e2016-02-01 18:57:58 -08001962
Calin Juravleddb7df22014-11-25 20:56:51 +00001963 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001964 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001965 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001966
Calin Juravle91debbc2014-11-26 19:01:09 +00001967 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001968 __ movl(out, Immediate(1));
1969 __ jmp(&done);
1970
1971 __ Bind(&less);
1972 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001973
1974 __ Bind(&done);
1975}
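// The three-way compare above therefore lowers to roughly:
//   <cmpl/cmpq/ucomis>        // for FP, an unordered result jumps straight to
//                             // 'greater' or 'less' per the compare's gt/lt bias
//   movl out, 0               // movl leaves the flags untouched
//   je   done
//   jl   less                 // jb for FP, since ucomis{s,d} reports "less" via CF
//   movl out, 1               // greater
//   jmp  done
// less:
//   movl out, -1
// done: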
1976
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001977void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001978 LocationSummary* locations =
1979 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001980 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001981}
1982
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001983void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001984 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001985}
1986
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001987void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1988 LocationSummary* locations =
1989 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1990 locations->SetOut(Location::ConstantLocation(constant));
1991}
1992
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001993void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001994 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001995}
1996
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001997void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001998 LocationSummary* locations =
1999 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002000 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002001}
2002
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002003void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002004 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002005}
2006
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002007void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2008 LocationSummary* locations =
2009 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2010 locations->SetOut(Location::ConstantLocation(constant));
2011}
2012
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002013void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002014 // Will be generated at use site.
2015}
2016
2017void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2018 LocationSummary* locations =
2019 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2020 locations->SetOut(Location::ConstantLocation(constant));
2021}
2022
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002023void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2024 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002025 // Will be generated at use site.
2026}
2027
Calin Juravle27df7582015-04-17 19:12:31 +01002028void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2029 memory_barrier->SetLocations(nullptr);
2030}
2031
2032void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002033 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002034}
2035
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002036void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2037 ret->SetLocations(nullptr);
2038}
2039
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002040void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002041 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002042}
2043
2044void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002045 LocationSummary* locations =
2046 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002047 switch (ret->InputAt(0)->GetType()) {
2048 case Primitive::kPrimBoolean:
2049 case Primitive::kPrimByte:
2050 case Primitive::kPrimChar:
2051 case Primitive::kPrimShort:
2052 case Primitive::kPrimInt:
2053 case Primitive::kPrimNot:
2054 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002055 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002056 break;
2057
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002058 case Primitive::kPrimFloat:
2059 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002060 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002061 break;
2062
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002063 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002064 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002065 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002066}
2067
2068void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2069 if (kIsDebugBuild) {
2070 switch (ret->InputAt(0)->GetType()) {
2071 case Primitive::kPrimBoolean:
2072 case Primitive::kPrimByte:
2073 case Primitive::kPrimChar:
2074 case Primitive::kPrimShort:
2075 case Primitive::kPrimInt:
2076 case Primitive::kPrimNot:
2077 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002078 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002079 break;
2080
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002081 case Primitive::kPrimFloat:
2082 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002083 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002084 XMM0);
2085 break;
2086
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002087 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002088 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002089 }
2090 }
2091 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002092}
2093
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002094Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2095 switch (type) {
2096 case Primitive::kPrimBoolean:
2097 case Primitive::kPrimByte:
2098 case Primitive::kPrimChar:
2099 case Primitive::kPrimShort:
2100 case Primitive::kPrimInt:
2101 case Primitive::kPrimNot:
2102 case Primitive::kPrimLong:
2103 return Location::RegisterLocation(RAX);
2104
2105 case Primitive::kPrimVoid:
2106 return Location::NoLocation();
2107
2108 case Primitive::kPrimDouble:
2109 case Primitive::kPrimFloat:
2110 return Location::FpuRegisterLocation(XMM0);
2111 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002112
2113 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002114}
2115
2116Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2117 return Location::RegisterLocation(kMethodRegisterArgument);
2118}
2119
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002120Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002121 switch (type) {
2122 case Primitive::kPrimBoolean:
2123 case Primitive::kPrimByte:
2124 case Primitive::kPrimChar:
2125 case Primitive::kPrimShort:
2126 case Primitive::kPrimInt:
2127 case Primitive::kPrimNot: {
2128 uint32_t index = gp_index_++;
2129 stack_index_++;
2130 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002131 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002132 } else {
2133 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2134 }
2135 }
2136
2137 case Primitive::kPrimLong: {
2138 uint32_t index = gp_index_;
2139 stack_index_ += 2;
2140 if (index < calling_convention.GetNumberOfRegisters()) {
2141 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002142 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002143 } else {
2144 gp_index_ += 2;
2145 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2146 }
2147 }
2148
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002149 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002150 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002151 stack_index_++;
2152 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002153 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002154 } else {
2155 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2156 }
2157 }
2158
2159 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002160 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002161 stack_index_ += 2;
2162 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002163 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002164 } else {
2165 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2166 }
2167 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002168
2169 case Primitive::kPrimVoid:
2170 LOG(FATAL) << "Unexpected parameter type " << type;
2171 break;
2172 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002173 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002174}
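// Illustrative sketch (not generated code): GetNextLocation() walks a
// signature left to right, advancing gp_index_ and float_index_
// independently while stack_index_ advances for every argument (by 2 for
// wide types). Assuming the usual ART x86-64 parameter register arrays
// declared in code_generator_x86_64.h, a signature (int, long, float, double)
// would map roughly to:
//   arg0 int    -> GetRegisterAt(0),    stack_index_ = 1
//   arg1 long   -> GetRegisterAt(1),    stack_index_ = 3
//   arg2 float  -> GetFpuRegisterAt(0), stack_index_ = 4
//   arg3 double -> GetFpuRegisterAt(1), stack_index_ = 6
// Arguments beyond the available registers fall back to the stack slots
// computed from stack_index_ as in the code above.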
2175
Calin Juravle175dc732015-08-25 15:42:32 +01002176void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2177  // The trampoline uses the same calling convention as the dex calling convention,
2178  // except that instead of loading arg0/r0 with the target Method*, arg0/r0 will
2179  // contain the method_idx.
2180 HandleInvoke(invoke);
2181}
2182
2183void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2184 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2185}
2186
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002187void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002188 // Explicit clinit checks triggered by static invokes must have been pruned by
2189 // art::PrepareForRegisterAllocation.
2190 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002191
Mark Mendellfb8d2792015-03-31 22:16:59 -04002192 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002193 if (intrinsic.TryDispatch(invoke)) {
2194 return;
2195 }
2196
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002197 HandleInvoke(invoke);
2198}
2199
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002200static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2201 if (invoke->GetLocations()->Intrinsified()) {
2202 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2203 intrinsic.Dispatch(invoke);
2204 return true;
2205 }
2206 return false;
2207}
2208
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002209void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002210 // Explicit clinit checks triggered by static invokes must have been pruned by
2211 // art::PrepareForRegisterAllocation.
2212 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002213
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002214 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2215 return;
2216 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002217
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002218 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002219 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002220 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002221 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002222}
2223
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002224void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002225 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002226 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002227}
2228
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002229void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002230 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002231 if (intrinsic.TryDispatch(invoke)) {
2232 return;
2233 }
2234
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002235 HandleInvoke(invoke);
2236}
2237
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002238void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002239 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2240 return;
2241 }
2242
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002243 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002244 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002245 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002246}
2247
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002248void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2249 HandleInvoke(invoke);
2250 // Add the hidden argument.
2251 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2252}
2253
2254void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2255 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002256 LocationSummary* locations = invoke->GetLocations();
2257 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2258 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Mathieu Chartiere401d142015-04-22 13:56:20 -07002259 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2260 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002261 Location receiver = locations->InAt(0);
2262 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2263
Roland Levillain0d5a2812015-11-13 10:07:31 +00002264  // Set the hidden argument. It is safe to do this here, as RAX
2265 // won't be modified thereafter, before the `call` instruction.
2266 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002267 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002268
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002269 if (receiver.IsStackSlot()) {
2270 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002271 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002272 __ movl(temp, Address(temp, class_offset));
2273 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002274 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002275 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002276 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002277 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002278 // Instead of simply (possibly) unpoisoning `temp` here, we should
2279 // emit a read barrier for the previous class reference load.
2280  // However, this is not required in practice, as this is an
2281  // intermediate/temporary reference and because the current
2282  // concurrent copying collector keeps the from-space memory
2283  // intact/accessible until the end of the marking phase (future
2284  // collectors may not).
Roland Levillain4d027112015-07-01 15:41:14 +01002285 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002286 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002287 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002288 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002289 __ call(Address(temp,
2290 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002291
2292 DCHECK(!codegen_->IsLeafMethod());
2293 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2294}
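// Illustrative sketch of the interface dispatch emitted above, for an
// in-register receiver (register names are placeholders, not real
// allocations):
//   movl temp, [receiver + class_offset]    // load receiver->klass_
//   (maybe unpoison temp)                   // heap-poisoning builds only
//   movq temp, [temp + method_offset]       // IMT slot for this method
//   call [temp + entry_point_offset]        // enter the resolved code
// with RAX holding the dex method index as the hidden argument, set by
// Load64BitValue() before the receiver is touched.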
2295
Roland Levillain88cb1752014-10-20 16:36:47 +01002296void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2297 LocationSummary* locations =
2298 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2299 switch (neg->GetResultType()) {
2300 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002301 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002302 locations->SetInAt(0, Location::RequiresRegister());
2303 locations->SetOut(Location::SameAsFirstInput());
2304 break;
2305
Roland Levillain88cb1752014-10-20 16:36:47 +01002306 case Primitive::kPrimFloat:
2307 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002308 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002309 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002310 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002311 break;
2312
2313 default:
2314 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2315 }
2316}
2317
2318void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2319 LocationSummary* locations = neg->GetLocations();
2320 Location out = locations->Out();
2321 Location in = locations->InAt(0);
2322 switch (neg->GetResultType()) {
2323 case Primitive::kPrimInt:
2324 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002325 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002326 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002327 break;
2328
2329 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002330 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002331 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002332 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002333 break;
2334
Roland Levillain5368c212014-11-27 15:03:41 +00002335 case Primitive::kPrimFloat: {
2336 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002337 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002338 // Implement float negation with an exclusive or with value
2339 // 0x80000000 (mask for bit 31, representing the sign of a
2340 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002341 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002342 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002343 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002344 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002345
Roland Levillain5368c212014-11-27 15:03:41 +00002346 case Primitive::kPrimDouble: {
2347 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002348 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002349 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002350 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002351 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002352 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002353 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002354 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002355 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002356
2357 default:
2358 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2359 }
2360}
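// Note on the xorps/xorpd sequences above: flipping only the sign bit
// implements Java negation exactly. For example, negating 2.0f
// (0x40000000) yields 0xC0000000 (-2.0f), and negating +0.0f yields -0.0f,
// which a subtraction from zero would get wrong (0.0f - 0.0f is +0.0f).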
2361
Roland Levillaindff1f282014-11-05 14:15:05 +00002362void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2363 LocationSummary* locations =
2364 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2365 Primitive::Type result_type = conversion->GetResultType();
2366 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002367 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002368
David Brazdilb2bd1c52015-03-25 11:17:37 +00002369  // The Java language does not allow treating boolean as an integral type, but
2370 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002371
Roland Levillaindff1f282014-11-05 14:15:05 +00002372 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002373 case Primitive::kPrimByte:
2374 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002375 case Primitive::kPrimLong:
2376 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002377 case Primitive::kPrimBoolean:
2378 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002379 case Primitive::kPrimShort:
2380 case Primitive::kPrimInt:
2381 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002382 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002383 locations->SetInAt(0, Location::Any());
2384 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2385 break;
2386
2387 default:
2388 LOG(FATAL) << "Unexpected type conversion from " << input_type
2389 << " to " << result_type;
2390 }
2391 break;
2392
Roland Levillain01a8d712014-11-14 16:27:39 +00002393 case Primitive::kPrimShort:
2394 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002395 case Primitive::kPrimLong:
2396 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002397 case Primitive::kPrimBoolean:
2398 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002399 case Primitive::kPrimByte:
2400 case Primitive::kPrimInt:
2401 case Primitive::kPrimChar:
2402 // Processing a Dex `int-to-short' instruction.
2403 locations->SetInAt(0, Location::Any());
2404 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2405 break;
2406
2407 default:
2408 LOG(FATAL) << "Unexpected type conversion from " << input_type
2409 << " to " << result_type;
2410 }
2411 break;
2412
Roland Levillain946e1432014-11-11 17:35:19 +00002413 case Primitive::kPrimInt:
2414 switch (input_type) {
2415 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002416 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002417 locations->SetInAt(0, Location::Any());
2418 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2419 break;
2420
2421 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002422 // Processing a Dex `float-to-int' instruction.
2423 locations->SetInAt(0, Location::RequiresFpuRegister());
2424 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002425 break;
2426
Roland Levillain946e1432014-11-11 17:35:19 +00002427 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002428 // Processing a Dex `double-to-int' instruction.
2429 locations->SetInAt(0, Location::RequiresFpuRegister());
2430 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002431 break;
2432
2433 default:
2434 LOG(FATAL) << "Unexpected type conversion from " << input_type
2435 << " to " << result_type;
2436 }
2437 break;
2438
Roland Levillaindff1f282014-11-05 14:15:05 +00002439 case Primitive::kPrimLong:
2440 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002441 case Primitive::kPrimBoolean:
2442 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002443 case Primitive::kPrimByte:
2444 case Primitive::kPrimShort:
2445 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002446 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002447 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002448 // TODO: We would benefit from a (to-be-implemented)
2449 // Location::RegisterOrStackSlot requirement for this input.
2450 locations->SetInAt(0, Location::RequiresRegister());
2451 locations->SetOut(Location::RequiresRegister());
2452 break;
2453
2454 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002455 // Processing a Dex `float-to-long' instruction.
2456 locations->SetInAt(0, Location::RequiresFpuRegister());
2457 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002458 break;
2459
Roland Levillaindff1f282014-11-05 14:15:05 +00002460 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002461 // Processing a Dex `double-to-long' instruction.
2462 locations->SetInAt(0, Location::RequiresFpuRegister());
2463 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002464 break;
2465
2466 default:
2467 LOG(FATAL) << "Unexpected type conversion from " << input_type
2468 << " to " << result_type;
2469 }
2470 break;
2471
Roland Levillain981e4542014-11-14 11:47:14 +00002472 case Primitive::kPrimChar:
2473 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002474 case Primitive::kPrimLong:
2475 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002476 case Primitive::kPrimBoolean:
2477 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002478 case Primitive::kPrimByte:
2479 case Primitive::kPrimShort:
2480 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002481 // Processing a Dex `int-to-char' instruction.
2482 locations->SetInAt(0, Location::Any());
2483 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2484 break;
2485
2486 default:
2487 LOG(FATAL) << "Unexpected type conversion from " << input_type
2488 << " to " << result_type;
2489 }
2490 break;
2491
Roland Levillaindff1f282014-11-05 14:15:05 +00002492 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002493 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002494 case Primitive::kPrimBoolean:
2495 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002496 case Primitive::kPrimByte:
2497 case Primitive::kPrimShort:
2498 case Primitive::kPrimInt:
2499 case Primitive::kPrimChar:
2500 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002501 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002502 locations->SetOut(Location::RequiresFpuRegister());
2503 break;
2504
2505 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002506 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002507 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002508 locations->SetOut(Location::RequiresFpuRegister());
2509 break;
2510
Roland Levillaincff13742014-11-17 14:32:17 +00002511 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002512 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002513 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002514 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002515 break;
2516
2517 default:
2518 LOG(FATAL) << "Unexpected type conversion from " << input_type
2519 << " to " << result_type;
2520      }
2521 break;
2522
Roland Levillaindff1f282014-11-05 14:15:05 +00002523 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002524 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002525 case Primitive::kPrimBoolean:
2526 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002527 case Primitive::kPrimByte:
2528 case Primitive::kPrimShort:
2529 case Primitive::kPrimInt:
2530 case Primitive::kPrimChar:
2531 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002532 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002533 locations->SetOut(Location::RequiresFpuRegister());
2534 break;
2535
2536 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002537 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002538 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002539 locations->SetOut(Location::RequiresFpuRegister());
2540 break;
2541
Roland Levillaincff13742014-11-17 14:32:17 +00002542 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002543 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002544 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002545 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002546 break;
2547
2548 default:
2549 LOG(FATAL) << "Unexpected type conversion from " << input_type
2550 << " to " << result_type;
2551 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002552 break;
2553
2554 default:
2555 LOG(FATAL) << "Unexpected type conversion from " << input_type
2556 << " to " << result_type;
2557 }
2558}
2559
2560void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2561 LocationSummary* locations = conversion->GetLocations();
2562 Location out = locations->Out();
2563 Location in = locations->InAt(0);
2564 Primitive::Type result_type = conversion->GetResultType();
2565 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002566 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002567 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002568 case Primitive::kPrimByte:
2569 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002570 case Primitive::kPrimLong:
2571 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002572 case Primitive::kPrimBoolean:
2573 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002574 case Primitive::kPrimShort:
2575 case Primitive::kPrimInt:
2576 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002577 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002578 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002579 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002580 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002581 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002582 Address(CpuRegister(RSP), in.GetStackIndex()));
2583 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002584 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002585 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002586 }
2587 break;
2588
2589 default:
2590 LOG(FATAL) << "Unexpected type conversion from " << input_type
2591 << " to " << result_type;
2592 }
2593 break;
2594
Roland Levillain01a8d712014-11-14 16:27:39 +00002595 case Primitive::kPrimShort:
2596 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002597 case Primitive::kPrimLong:
2598 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002599 case Primitive::kPrimBoolean:
2600 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002601 case Primitive::kPrimByte:
2602 case Primitive::kPrimInt:
2603 case Primitive::kPrimChar:
2604 // Processing a Dex `int-to-short' instruction.
2605 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002606 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002607 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002608 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002609 Address(CpuRegister(RSP), in.GetStackIndex()));
2610 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002611 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002612 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002613 }
2614 break;
2615
2616 default:
2617 LOG(FATAL) << "Unexpected type conversion from " << input_type
2618 << " to " << result_type;
2619 }
2620 break;
2621
Roland Levillain946e1432014-11-11 17:35:19 +00002622 case Primitive::kPrimInt:
2623 switch (input_type) {
2624 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002625 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002626 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002627 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002628 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002629 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002630 Address(CpuRegister(RSP), in.GetStackIndex()));
2631 } else {
2632 DCHECK(in.IsConstant());
2633 DCHECK(in.GetConstant()->IsLongConstant());
2634 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002635 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002636 }
2637 break;
2638
Roland Levillain3f8f9362014-12-02 17:45:01 +00002639 case Primitive::kPrimFloat: {
2640 // Processing a Dex `float-to-int' instruction.
2641 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2642 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002643 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002644
2645 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002646 // if input >= (float)INT_MAX goto done
2647 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002648 __ j(kAboveEqual, &done);
2649 // if input == NaN goto nan
2650 __ j(kUnordered, &nan);
2651 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002652 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002653 __ jmp(&done);
2654 __ Bind(&nan);
2655 // output = 0
2656 __ xorl(output, output);
2657 __ Bind(&done);
2658 break;
2659 }
2660
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002661 case Primitive::kPrimDouble: {
2662 // Processing a Dex `double-to-int' instruction.
2663 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2664 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002665 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002666
2667 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002668 // if input >= (double)INT_MAX goto done
2669 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002670 __ j(kAboveEqual, &done);
2671 // if input == NaN goto nan
2672 __ j(kUnordered, &nan);
2673 // output = double-to-int-truncate(input)
2674 __ cvttsd2si(output, input);
2675 __ jmp(&done);
2676 __ Bind(&nan);
2677 // output = 0
2678 __ xorl(output, output);
2679 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002680 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002681 }
Roland Levillain946e1432014-11-11 17:35:19 +00002682
2683 default:
2684 LOG(FATAL) << "Unexpected type conversion from " << input_type
2685 << " to " << result_type;
2686 }
2687 break;
2688
Roland Levillaindff1f282014-11-05 14:15:05 +00002689 case Primitive::kPrimLong:
2690      DCHECK(out.IsRegister());
2691      switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002692 case Primitive::kPrimBoolean:
2693 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002694 case Primitive::kPrimByte:
2695 case Primitive::kPrimShort:
2696 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002697 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002698 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002699 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002700 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002701 break;
2702
Roland Levillain624279f2014-12-04 11:54:28 +00002703 case Primitive::kPrimFloat: {
2704 // Processing a Dex `float-to-long' instruction.
2705 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2706 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002707 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002708
Mark Mendell92e83bf2015-05-07 11:25:03 -04002709 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002710 // if input >= (float)LONG_MAX goto done
2711 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002712 __ j(kAboveEqual, &done);
2713 // if input == NaN goto nan
2714 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002715 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002716 __ cvttss2si(output, input, true);
2717 __ jmp(&done);
2718 __ Bind(&nan);
2719 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002720 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002721 __ Bind(&done);
2722 break;
2723 }
2724
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002725 case Primitive::kPrimDouble: {
2726 // Processing a Dex `double-to-long' instruction.
2727 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2728 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002729 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002730
Mark Mendell92e83bf2015-05-07 11:25:03 -04002731 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002732 // if input >= (double)LONG_MAX goto done
2733 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002734 __ j(kAboveEqual, &done);
2735 // if input == NaN goto nan
2736 __ j(kUnordered, &nan);
2737 // output = double-to-long-truncate(input)
2738 __ cvttsd2si(output, input, true);
2739 __ jmp(&done);
2740 __ Bind(&nan);
2741 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002742 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002743 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002744 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002745 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002746
2747 default:
2748 LOG(FATAL) << "Unexpected type conversion from " << input_type
2749 << " to " << result_type;
2750 }
2751 break;
2752
Roland Levillain981e4542014-11-14 11:47:14 +00002753 case Primitive::kPrimChar:
2754 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002755 case Primitive::kPrimLong:
2756 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002757 case Primitive::kPrimBoolean:
2758 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002759 case Primitive::kPrimByte:
2760 case Primitive::kPrimShort:
2761 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002762 // Processing a Dex `int-to-char' instruction.
2763 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002764 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002765 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002766 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002767 Address(CpuRegister(RSP), in.GetStackIndex()));
2768 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002769 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002770 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002771 }
2772 break;
2773
2774 default:
2775 LOG(FATAL) << "Unexpected type conversion from " << input_type
2776 << " to " << result_type;
2777 }
2778 break;
2779
Roland Levillaindff1f282014-11-05 14:15:05 +00002780 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002781 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002782 case Primitive::kPrimBoolean:
2783 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002784 case Primitive::kPrimByte:
2785 case Primitive::kPrimShort:
2786 case Primitive::kPrimInt:
2787 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002788 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002789 if (in.IsRegister()) {
2790 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2791 } else if (in.IsConstant()) {
2792 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2793 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002794 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002795 } else {
2796 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2797 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2798 }
Roland Levillaincff13742014-11-17 14:32:17 +00002799 break;
2800
2801 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002802 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002803 if (in.IsRegister()) {
2804 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2805 } else if (in.IsConstant()) {
2806 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2807 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002808            codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002809 } else {
2810 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2811 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2812 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002813 break;
2814
Roland Levillaincff13742014-11-17 14:32:17 +00002815 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002816 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002817 if (in.IsFpuRegister()) {
2818 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2819 } else if (in.IsConstant()) {
2820 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2821 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002822 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002823 } else {
2824 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2825 Address(CpuRegister(RSP), in.GetStackIndex()));
2826 }
Roland Levillaincff13742014-11-17 14:32:17 +00002827 break;
2828
2829 default:
2830 LOG(FATAL) << "Unexpected type conversion from " << input_type
2831 << " to " << result_type;
2832      }
2833 break;
2834
Roland Levillaindff1f282014-11-05 14:15:05 +00002835 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002836 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002837 case Primitive::kPrimBoolean:
2838 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002839 case Primitive::kPrimByte:
2840 case Primitive::kPrimShort:
2841 case Primitive::kPrimInt:
2842 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002843 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002844 if (in.IsRegister()) {
2845 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2846 } else if (in.IsConstant()) {
2847 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2848 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002849 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002850 } else {
2851 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2852 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2853 }
Roland Levillaincff13742014-11-17 14:32:17 +00002854 break;
2855
2856 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002857 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002858 if (in.IsRegister()) {
2859 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2860 } else if (in.IsConstant()) {
2861 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2862 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002863 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002864 } else {
2865 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2866 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2867 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002868 break;
2869
Roland Levillaincff13742014-11-17 14:32:17 +00002870 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002871 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002872 if (in.IsFpuRegister()) {
2873 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2874 } else if (in.IsConstant()) {
2875 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2876 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002877 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002878 } else {
2879 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2880 Address(CpuRegister(RSP), in.GetStackIndex()));
2881 }
Roland Levillaincff13742014-11-17 14:32:17 +00002882 break;
2883
2884 default:
2885 LOG(FATAL) << "Unexpected type conversion from " << input_type
2886 << " to " << result_type;
2887      }
Roland Levillaindff1f282014-11-05 14:15:05 +00002888 break;
2889
2890 default:
2891 LOG(FATAL) << "Unexpected type conversion from " << input_type
2892 << " to " << result_type;
2893 }
2894}
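// Note on the float/double -> int/long conversions above: Java requires
// NaN to convert to 0 and out-of-range values to saturate. The emitted
// pattern loads the maximum value first, branches to `done` when the input
// compares above-or-equal to it, branches to `nan` on an unordered compare,
// and otherwise truncates with cvttss2si/cvttsd2si. Negative overflow needs
// no explicit check because the truncating conversion already produces the
// "integer indefinite" value, which equals the required INT_MIN/LONG_MIN
// result. For example, (int) Float.NaN is 0 and (int) 1e20f is
// Integer.MAX_VALUE.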
2895
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002896void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002897 LocationSummary* locations =
2898 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002899 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002900 case Primitive::kPrimInt: {
2901 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002902 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2903 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002904 break;
2905 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002906
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002907 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002908 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002909 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002910 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002911 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002912 break;
2913 }
2914
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002915 case Primitive::kPrimDouble:
2916 case Primitive::kPrimFloat: {
2917 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002918 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002919 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002920 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002921 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002922
2923 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002924 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002925 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002926}
2927
2928void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2929 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002930 Location first = locations->InAt(0);
2931 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002932 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002933
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002934 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002935 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002936 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002937 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2938 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002939 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2940 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002941 } else {
2942 __ leal(out.AsRegister<CpuRegister>(), Address(
2943 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2944 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002945 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002946 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2947 __ addl(out.AsRegister<CpuRegister>(),
2948 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2949 } else {
2950 __ leal(out.AsRegister<CpuRegister>(), Address(
2951 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2952 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002953 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002954 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002955 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002956 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002957 break;
2958 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002959
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002960 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002961 if (second.IsRegister()) {
2962 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2963 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002964 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2965 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002966 } else {
2967 __ leaq(out.AsRegister<CpuRegister>(), Address(
2968 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2969 }
2970 } else {
2971 DCHECK(second.IsConstant());
2972 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2973 int32_t int32_value = Low32Bits(value);
2974 DCHECK_EQ(int32_value, value);
2975 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2976 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2977 } else {
2978 __ leaq(out.AsRegister<CpuRegister>(), Address(
2979 first.AsRegister<CpuRegister>(), int32_value));
2980 }
2981 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002982 break;
2983 }
2984
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002985 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002986 if (second.IsFpuRegister()) {
2987 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2988 } else if (second.IsConstant()) {
2989 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002990 codegen_->LiteralFloatAddress(
2991 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002992 } else {
2993 DCHECK(second.IsStackSlot());
2994 __ addss(first.AsFpuRegister<XmmRegister>(),
2995 Address(CpuRegister(RSP), second.GetStackIndex()));
2996 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002997 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002998 }
2999
3000 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003001 if (second.IsFpuRegister()) {
3002 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3003 } else if (second.IsConstant()) {
3004 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003005 codegen_->LiteralDoubleAddress(
3006 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003007 } else {
3008 DCHECK(second.IsDoubleStackSlot());
3009 __ addsd(first.AsFpuRegister<XmmRegister>(),
3010 Address(CpuRegister(RSP), second.GetStackIndex()));
3011 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003012 break;
3013 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003014
3015 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003016 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003017 }
3018}
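// Note on the leal/leaq forms above: when the output register differs from
// both inputs, lea acts as a non-destructive three-operand add
// (out = first + second, or out = first + imm) without clobbering either
// source and without touching the flags, so no extra register-to-register
// move is needed before an addl/addq.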
3019
3020void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003021 LocationSummary* locations =
3022 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003023 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003024 case Primitive::kPrimInt: {
3025 locations->SetInAt(0, Location::RequiresRegister());
3026 locations->SetInAt(1, Location::Any());
3027 locations->SetOut(Location::SameAsFirstInput());
3028 break;
3029 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003030 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003031 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003032 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003033 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003034 break;
3035 }
Calin Juravle11351682014-10-23 15:38:15 +01003036 case Primitive::kPrimFloat:
3037 case Primitive::kPrimDouble: {
3038 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003039 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003040 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003041 break;
Calin Juravle11351682014-10-23 15:38:15 +01003042 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003043 default:
Calin Juravle11351682014-10-23 15:38:15 +01003044 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003045 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003046}
3047
3048void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3049 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003050 Location first = locations->InAt(0);
3051 Location second = locations->InAt(1);
3052 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003053 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003054 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003055 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003056 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003057 } else if (second.IsConstant()) {
3058 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003059 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003060 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003061 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003062 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003063 break;
3064 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003065 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003066 if (second.IsConstant()) {
3067 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3068 DCHECK(IsInt<32>(value));
3069 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3070 } else {
3071 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3072 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003073 break;
3074 }
3075
Calin Juravle11351682014-10-23 15:38:15 +01003076 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003077 if (second.IsFpuRegister()) {
3078 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3079 } else if (second.IsConstant()) {
3080 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003081 codegen_->LiteralFloatAddress(
3082 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003083 } else {
3084 DCHECK(second.IsStackSlot());
3085 __ subss(first.AsFpuRegister<XmmRegister>(),
3086 Address(CpuRegister(RSP), second.GetStackIndex()));
3087 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003088 break;
Calin Juravle11351682014-10-23 15:38:15 +01003089 }
3090
3091 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003092 if (second.IsFpuRegister()) {
3093 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3094 } else if (second.IsConstant()) {
3095 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003096 codegen_->LiteralDoubleAddress(
3097 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003098 } else {
3099 DCHECK(second.IsDoubleStackSlot());
3100 __ subsd(first.AsFpuRegister<XmmRegister>(),
3101 Address(CpuRegister(RSP), second.GetStackIndex()));
3102 }
Calin Juravle11351682014-10-23 15:38:15 +01003103 break;
3104 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003105
3106 default:
Calin Juravle11351682014-10-23 15:38:15 +01003107 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003108 }
3109}
3110
Calin Juravle34bacdf2014-10-07 20:23:36 +01003111void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3112 LocationSummary* locations =
3113 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3114 switch (mul->GetResultType()) {
3115 case Primitive::kPrimInt: {
3116 locations->SetInAt(0, Location::RequiresRegister());
3117 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003118 if (mul->InputAt(1)->IsIntConstant()) {
3119 // Can use 3 operand multiply.
3120 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3121 } else {
3122 locations->SetOut(Location::SameAsFirstInput());
3123 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003124 break;
3125 }
3126 case Primitive::kPrimLong: {
3127 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003128 locations->SetInAt(1, Location::Any());
3129 if (mul->InputAt(1)->IsLongConstant() &&
3130 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003131 // Can use 3 operand multiply.
3132 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3133 } else {
3134 locations->SetOut(Location::SameAsFirstInput());
3135 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003136 break;
3137 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003138 case Primitive::kPrimFloat:
3139 case Primitive::kPrimDouble: {
3140 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003141 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003142 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003143 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003144 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003145
3146 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003147 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003148 }
3149}
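// Note: the "3 operand multiply" mentioned above is the
// imul r32, r/m32, imm32 (or imul r64, r/m64, imm32) encoding, which writes
// first * constant directly into the destination register. Because the
// output need not alias the first input, a constant right-hand side can use
// Location::kNoOutputOverlap here.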
3150
3151void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3152 LocationSummary* locations = mul->GetLocations();
3153 Location first = locations->InAt(0);
3154 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003155 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003156 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003157 case Primitive::kPrimInt:
3158 // The constant may have ended up in a register, so test explicitly to avoid
3159 // problems where the output may not be the same as the first operand.
3160 if (mul->InputAt(1)->IsIntConstant()) {
3161 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3162 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3163 } else if (second.IsRegister()) {
3164 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003165 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003166 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003167 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003168 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003169 __ imull(first.AsRegister<CpuRegister>(),
3170 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003171 }
3172 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003173 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003174 // The constant may have ended up in a register, so test explicitly to avoid
3175 // problems where the output may not be the same as the first operand.
3176 if (mul->InputAt(1)->IsLongConstant()) {
3177 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3178 if (IsInt<32>(value)) {
3179 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3180 Immediate(static_cast<int32_t>(value)));
3181 } else {
3182 // Have to use the constant area.
3183 DCHECK(first.Equals(out));
3184 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3185 }
3186 } else if (second.IsRegister()) {
3187 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003188 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003189 } else {
3190 DCHECK(second.IsDoubleStackSlot());
3191 DCHECK(first.Equals(out));
3192 __ imulq(first.AsRegister<CpuRegister>(),
3193 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003194 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003195 break;
3196 }
3197
Calin Juravleb5bfa962014-10-21 18:02:24 +01003198 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003199 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003200 if (second.IsFpuRegister()) {
3201 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3202 } else if (second.IsConstant()) {
3203 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003204 codegen_->LiteralFloatAddress(
3205 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003206 } else {
3207 DCHECK(second.IsStackSlot());
3208 __ mulss(first.AsFpuRegister<XmmRegister>(),
3209 Address(CpuRegister(RSP), second.GetStackIndex()));
3210 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003211 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003212 }
3213
3214 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003215 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003216 if (second.IsFpuRegister()) {
3217 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3218 } else if (second.IsConstant()) {
3219 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003220 codegen_->LiteralDoubleAddress(
3221 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003222 } else {
3223 DCHECK(second.IsDoubleStackSlot());
3224 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3225 Address(CpuRegister(RSP), second.GetStackIndex()));
3226 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003227 break;
3228 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003229
3230 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003231 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003232 }
3233}
3234
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003235void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3236 uint32_t stack_adjustment, bool is_float) {
3237 if (source.IsStackSlot()) {
3238 DCHECK(is_float);
3239 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3240 } else if (source.IsDoubleStackSlot()) {
3241 DCHECK(!is_float);
3242 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3243 } else {
3244 // Write the value to the temporary location on the stack and load it onto the FP stack.
3245 if (is_float) {
3246 Location stack_temp = Location::StackSlot(temp_offset);
3247 codegen_->Move(stack_temp, source);
3248 __ flds(Address(CpuRegister(RSP), temp_offset));
3249 } else {
3250 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3251 codegen_->Move(stack_temp, source);
3252 __ fldl(Address(CpuRegister(RSP), temp_offset));
3253 }
3254 }
3255}
3256
3257void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3258 Primitive::Type type = rem->GetResultType();
3259 bool is_float = type == Primitive::kPrimFloat;
3260 size_t elem_size = Primitive::ComponentSize(type);
3261 LocationSummary* locations = rem->GetLocations();
3262 Location first = locations->InAt(0);
3263 Location second = locations->InAt(1);
3264 Location out = locations->Out();
3265
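  // SSE has no remainder instruction, so the truncated remainder that Java's %
  // requires for float/double is computed with the x87 fprem sequence below:
  // spill the operands, push them onto the x87 stack, reduce with fprem, then
  // move the result back into an XMM register.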
3266 // Create stack space for 2 elements.
3267 // TODO: enhance register allocator to ask for stack temporaries.
3268 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
3269
3270 // Load the values to the FP stack in reverse order, using temporaries if needed.
3271 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3272 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3273
3274 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003275 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003276 __ Bind(&retry);
3277 __ fprem();
3278
3279 // Move FP status to AX.
3280 __ fstsw();
3281
3282 // And see if the argument reduction is complete. This is signaled by the
3283 // C2 FPU flag bit set to 0.
3284 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3285 __ j(kNotEqual, &retry);
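  // fprem only produces a partial remainder: each iteration reduces the exponent
  // difference between the two operands by at most 63, so operands of very
  // different magnitude need several passes. The C2 status flag stays set until
  // the reduction is complete, hence the retry loop above.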
3286
3287 // We have settled on the final value. Retrieve it into an XMM register.
3288 // Store FP top of stack to real stack.
3289 if (is_float) {
3290 __ fsts(Address(CpuRegister(RSP), 0));
3291 } else {
3292 __ fstl(Address(CpuRegister(RSP), 0));
3293 }
3294
3295 // Pop the 2 items from the FP stack.
3296 __ fucompp();
3297
3298 // Load the value from the stack into an XMM register.
3299 DCHECK(out.IsFpuRegister()) << out;
3300 if (is_float) {
3301 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3302 } else {
3303 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3304 }
3305
3306 // And remove the temporary stack space we allocated.
3307 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3308}
3309
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003310void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3311 DCHECK(instruction->IsDiv() || instruction->IsRem());
3312
3313 LocationSummary* locations = instruction->GetLocations();
3314 Location second = locations->InAt(1);
3315 DCHECK(second.IsConstant());
3316
3317 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3318 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003319 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003320
3321 DCHECK(imm == 1 || imm == -1);
3322
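  // x / 1 == x, x / -1 == -x, and x % (+/-)1 == 0, so no division instruction
  // is needed for these divisors.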
3323 switch (instruction->GetResultType()) {
3324 case Primitive::kPrimInt: {
3325 if (instruction->IsRem()) {
3326 __ xorl(output_register, output_register);
3327 } else {
3328 __ movl(output_register, input_register);
3329 if (imm == -1) {
3330 __ negl(output_register);
3331 }
3332 }
3333 break;
3334 }
3335
3336 case Primitive::kPrimLong: {
3337 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003338 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003339 } else {
3340 __ movq(output_register, input_register);
3341 if (imm == -1) {
3342 __ negq(output_register);
3343 }
3344 }
3345 break;
3346 }
3347
3348 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003349 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003350 }
3351}
3352
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003353void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003354 LocationSummary* locations = instruction->GetLocations();
3355 Location second = locations->InAt(1);
3356
3357 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3358 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3359
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003360 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003361 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3362 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003363
3364 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3365
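  // Round-toward-zero division by a power of two: bias a negative numerator by
  // (abs_imm - 1) before the arithmetic shift, then negate if the divisor was
  // negative. A rough C equivalent of the int path (illustrative only):
  //   int32_t div_pow2(int32_t n, int32_t imm) {
  //     int k = CTZ(imm);                              // abs_imm == 1 << k
  //     int32_t t = (n < 0) ? n + ((1 << k) - 1) : n;  // lea + test + cmov
  //     t >>= k;                                       // sarl
  //     return (imm < 0) ? -t : t;                     // negl
  //   }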
3366 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003367 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003368 __ testl(numerator, numerator);
3369 __ cmov(kGreaterEqual, tmp, numerator);
3370 int shift = CTZ(imm);
3371 __ sarl(tmp, Immediate(shift));
3372
3373 if (imm < 0) {
3374 __ negl(tmp);
3375 }
3376
3377 __ movl(output_register, tmp);
3378 } else {
3379 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3380 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3381
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003382 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003383 __ addq(rdx, numerator);
3384 __ testq(numerator, numerator);
3385 __ cmov(kGreaterEqual, rdx, numerator);
3386 int shift = CTZ(imm);
3387 __ sarq(rdx, Immediate(shift));
3388
3389 if (imm < 0) {
3390 __ negq(rdx);
3391 }
3392
3393 __ movq(output_register, rdx);
3394 }
3395}
3396
3397void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3398 DCHECK(instruction->IsDiv() || instruction->IsRem());
3399
3400 LocationSummary* locations = instruction->GetLocations();
3401 Location second = locations->InAt(1);
3402
3403 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3404 : locations->GetTemp(0).AsRegister<CpuRegister>();
3405 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3406 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3407 : locations->Out().AsRegister<CpuRegister>();
3408 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3409
3410 DCHECK_EQ(RAX, eax.AsRegister());
3411 DCHECK_EQ(RDX, edx.AsRegister());
3412 if (instruction->IsDiv()) {
3413 DCHECK_EQ(RAX, out.AsRegister());
3414 } else {
3415 DCHECK_EQ(RDX, out.AsRegister());
3416 }
3417
3418 int64_t magic;
3419 int shift;
3420
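  // Division by an arbitrary constant is strength-reduced to the classic
  // "magic number" multiply (Hacker's Delight, ch. 10). In outline, for an
  // int32 numerator n and divisor imm:
  //   q  = high32(n * magic);           // imull leaves the high half in EDX
  //   if (imm > 0 && magic < 0) q += n;
  //   if (imm < 0 && magic > 0) q -= n;
  //   q >>= shift;                      // arithmetic shift
  //   q += (uint32_t)q >> 31;           // add the sign bit: round toward zero
  // and the remainder, when requested, is n - q * imm. For example, for
  // imm == 7 the usual tabulated values are magic == 0x92492493, shift == 2.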
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003421 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003422 if (instruction->GetResultType() == Primitive::kPrimInt) {
3423 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3424
3425 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3426
3427 __ movl(numerator, eax);
3428
Mark Mendell0c9497d2015-08-21 09:30:05 -04003429 NearLabel no_div;
3430 NearLabel end;
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003431 __ testl(eax, eax);
3432 __ j(kNotEqual, &no_div);
3433
3434 __ xorl(out, out);
3435 __ jmp(&end);
3436
3437 __ Bind(&no_div);
3438
3439 __ movl(eax, Immediate(magic));
3440 __ imull(numerator);
3441
3442 if (imm > 0 && magic < 0) {
3443 __ addl(edx, numerator);
3444 } else if (imm < 0 && magic > 0) {
3445 __ subl(edx, numerator);
3446 }
3447
3448 if (shift != 0) {
3449 __ sarl(edx, Immediate(shift));
3450 }
3451
3452 __ movl(eax, edx);
3453 __ shrl(edx, Immediate(31));
3454 __ addl(edx, eax);
3455
3456 if (instruction->IsRem()) {
3457 __ movl(eax, numerator);
3458 __ imull(edx, Immediate(imm));
3459 __ subl(eax, edx);
3460 __ movl(edx, eax);
3461 } else {
3462 __ movl(eax, edx);
3463 }
3464 __ Bind(&end);
3465 } else {
3466 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3467
3468 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3469
3470 CpuRegister rax = eax;
3471 CpuRegister rdx = edx;
3472
3473 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3474
3475 // Save the numerator.
3476 __ movq(numerator, rax);
3477
3478 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003479 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003480
3481 // RDX:RAX = magic * numerator
3482 __ imulq(numerator);
3483
3484 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003485 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003486 __ addq(rdx, numerator);
3487 } else if (imm < 0 && magic > 0) {
3488 // RDX -= numerator
3489 __ subq(rdx, numerator);
3490 }
3491
3492 // Shift if needed.
3493 if (shift != 0) {
3494 __ sarq(rdx, Immediate(shift));
3495 }
3496
3497 // RDX += 1 if RDX < 0
3498 __ movq(rax, rdx);
3499 __ shrq(rdx, Immediate(63));
3500 __ addq(rdx, rax);
3501
3502 if (instruction->IsRem()) {
3503 __ movq(rax, numerator);
3504
3505 if (IsInt<32>(imm)) {
3506 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3507 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003508 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003509 }
3510
3511 __ subq(rax, rdx);
3512 __ movq(rdx, rax);
3513 } else {
3514 __ movq(rax, rdx);
3515 }
3516 }
3517}
3518
Calin Juravlebacfec32014-11-14 15:54:36 +00003519void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3520 DCHECK(instruction->IsDiv() || instruction->IsRem());
3521 Primitive::Type type = instruction->GetResultType();
3522 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3523
3524 bool is_div = instruction->IsDiv();
3525 LocationSummary* locations = instruction->GetLocations();
3526
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003527 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3528 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003529
Roland Levillain271ab9c2014-11-27 15:23:57 +00003530 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003531 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003532
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003533 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003534 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003535
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003536 if (imm == 0) {
3537 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3538 } else if (imm == 1 || imm == -1) {
3539 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003540 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003541 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003542 } else {
3543 DCHECK(imm <= -2 || imm >= 2);
3544 GenerateDivRemWithAnyConstant(instruction);
3545 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003546 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003547 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003548 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
3549 out.AsRegister(), type, is_div);
3550 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003551
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003552 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3553 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3554 // Dividing by -1 is actually negation, and -0x80000000(00000000) = 0x80000000(00000000),
3555 // so it's safe to just use negl instead of more complex comparisons.
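    // (idivl/idivq raise a #DE fault both for a zero divisor and for the
    // INT_MIN / -1 overflow; negating INT_MIN wraps back to INT_MIN, which is
    // exactly the result Java specifies for that overflow, so the negation in
    // the slow path is correct for every numerator.)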
3556 if (type == Primitive::kPrimInt) {
3557 __ cmpl(second_reg, Immediate(-1));
3558 __ j(kEqual, slow_path->GetEntryLabel());
3559 // edx:eax <- sign extension of eax
3560 __ cdq();
3561 // eax = quotient, edx = remainder
3562 __ idivl(second_reg);
3563 } else {
3564 __ cmpq(second_reg, Immediate(-1));
3565 __ j(kEqual, slow_path->GetEntryLabel());
3566 // rdx:rax <- sign extension of rax
3567 __ cqo();
3568 // rax = quotient, rdx = remainder
3569 __ idivq(second_reg);
3570 }
3571 __ Bind(slow_path->GetExitLabel());
3572 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003573}
3574
Calin Juravle7c4954d2014-10-28 16:57:40 +00003575void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3576 LocationSummary* locations =
3577 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3578 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003579 case Primitive::kPrimInt:
3580 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003581 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003582 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003583 locations->SetOut(Location::SameAsFirstInput());
3584 // Intel uses edx:eax (rdx:rax for long) as the dividend.
3585 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003586 // We need to save the numerator while we tweak RAX and RDX. Since imul forces its
3587 // results into RAX and RDX, we request an extra temporary to hold the numerator
3588 // whenever the divisor is a constant.
3589 if (div->InputAt(1)->IsConstant()) {
3590 locations->AddTemp(Location::RequiresRegister());
3591 }
Calin Juravled0d48522014-11-04 16:40:20 +00003592 break;
3593 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003594
Calin Juravle7c4954d2014-10-28 16:57:40 +00003595 case Primitive::kPrimFloat:
3596 case Primitive::kPrimDouble: {
3597 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003598 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003599 locations->SetOut(Location::SameAsFirstInput());
3600 break;
3601 }
3602
3603 default:
3604 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3605 }
3606}
3607
3608void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3609 LocationSummary* locations = div->GetLocations();
3610 Location first = locations->InAt(0);
3611 Location second = locations->InAt(1);
3612 DCHECK(first.Equals(locations->Out()));
3613
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003614 Primitive::Type type = div->GetResultType();
3615 switch (type) {
3616 case Primitive::kPrimInt:
3617 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003618 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003619 break;
3620 }
3621
Calin Juravle7c4954d2014-10-28 16:57:40 +00003622 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003623 if (second.IsFpuRegister()) {
3624 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3625 } else if (second.IsConstant()) {
3626 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003627 codegen_->LiteralFloatAddress(
3628 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003629 } else {
3630 DCHECK(second.IsStackSlot());
3631 __ divss(first.AsFpuRegister<XmmRegister>(),
3632 Address(CpuRegister(RSP), second.GetStackIndex()));
3633 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003634 break;
3635 }
3636
3637 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003638 if (second.IsFpuRegister()) {
3639 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3640 } else if (second.IsConstant()) {
3641 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003642 codegen_->LiteralDoubleAddress(
3643 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003644 } else {
3645 DCHECK(second.IsDoubleStackSlot());
3646 __ divsd(first.AsFpuRegister<XmmRegister>(),
3647 Address(CpuRegister(RSP), second.GetStackIndex()));
3648 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003649 break;
3650 }
3651
3652 default:
3653 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3654 }
3655}
3656
Calin Juravlebacfec32014-11-14 15:54:36 +00003657void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003658 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003659 LocationSummary* locations =
3660 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003661
3662 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003663 case Primitive::kPrimInt:
3664 case Primitive::kPrimLong: {
3665 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003666 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003667 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3668 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003669 // We need to save the numerator while we tweak RAX and RDX. Since imul forces its
3670 // results into RAX and RDX, we request an extra temporary to hold the numerator
3671 // whenever the divisor is a constant.
3672 if (rem->InputAt(1)->IsConstant()) {
3673 locations->AddTemp(Location::RequiresRegister());
3674 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003675 break;
3676 }
3677
3678 case Primitive::kPrimFloat:
3679 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003680 locations->SetInAt(0, Location::Any());
3681 locations->SetInAt(1, Location::Any());
3682 locations->SetOut(Location::RequiresFpuRegister());
3683 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003684 break;
3685 }
3686
3687 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003688 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003689 }
3690}
3691
3692void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3693 Primitive::Type type = rem->GetResultType();
3694 switch (type) {
3695 case Primitive::kPrimInt:
3696 case Primitive::kPrimLong: {
3697 GenerateDivRemIntegral(rem);
3698 break;
3699 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003700 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003701 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003702 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003703 break;
3704 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003705 default:
3706 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3707 }
3708}
3709
Calin Juravled0d48522014-11-04 16:40:20 +00003710void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003711 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3712 ? LocationSummary::kCallOnSlowPath
3713 : LocationSummary::kNoCall;
3714 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003715 locations->SetInAt(0, Location::Any());
3716 if (instruction->HasUses()) {
3717 locations->SetOut(Location::SameAsFirstInput());
3718 }
3719}
3720
3721void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003722 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003723 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3724 codegen_->AddSlowPath(slow_path);
3725
3726 LocationSummary* locations = instruction->GetLocations();
3727 Location value = locations->InAt(0);
3728
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003729 switch (instruction->GetType()) {
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003730 case Primitive::kPrimByte:
3731 case Primitive::kPrimChar:
3732 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003733 case Primitive::kPrimInt: {
3734 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003735 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003736 __ j(kEqual, slow_path->GetEntryLabel());
3737 } else if (value.IsStackSlot()) {
3738 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3739 __ j(kEqual, slow_path->GetEntryLabel());
3740 } else {
3741 DCHECK(value.IsConstant()) << value;
3742 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3743 __ jmp(slow_path->GetEntryLabel());
3744 }
3745 }
3746 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003747 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003748 case Primitive::kPrimLong: {
3749 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003750 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003751 __ j(kEqual, slow_path->GetEntryLabel());
3752 } else if (value.IsDoubleStackSlot()) {
3753 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3754 __ j(kEqual, slow_path->GetEntryLabel());
3755 } else {
3756 DCHECK(value.IsConstant()) << value;
3757 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3758 __ jmp(slow_path->GetEntryLabel());
3759 }
3760 }
3761 break;
3762 }
3763 default:
3764 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003765 }
Calin Juravled0d48522014-11-04 16:40:20 +00003766}
3767
Calin Juravle9aec02f2014-11-18 23:06:35 +00003768void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3769 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3770
3771 LocationSummary* locations =
3772 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3773
3774 switch (op->GetResultType()) {
3775 case Primitive::kPrimInt:
3776 case Primitive::kPrimLong: {
3777 locations->SetInAt(0, Location::RequiresRegister());
3778 // The shift count needs to be in CL.
3779 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3780 locations->SetOut(Location::SameAsFirstInput());
3781 break;
3782 }
3783 default:
3784 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3785 }
3786}
3787
3788void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3789 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3790
3791 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003792 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003793 Location second = locations->InAt(1);
3794
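  // Java only uses the low 5 bits of the shift distance for int shifts and the
  // low 6 bits for long shifts, which matches what shl/sar/shr do with a count
  // in CL; register counts therefore need no explicit masking, and constant
  // counts are masked with kMaxIntShiftValue / kMaxLongShiftValue below.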
3795 switch (op->GetResultType()) {
3796 case Primitive::kPrimInt: {
3797 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003798 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003799 if (op->IsShl()) {
3800 __ shll(first_reg, second_reg);
3801 } else if (op->IsShr()) {
3802 __ sarl(first_reg, second_reg);
3803 } else {
3804 __ shrl(first_reg, second_reg);
3805 }
3806 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003807 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003808 if (op->IsShl()) {
3809 __ shll(first_reg, imm);
3810 } else if (op->IsShr()) {
3811 __ sarl(first_reg, imm);
3812 } else {
3813 __ shrl(first_reg, imm);
3814 }
3815 }
3816 break;
3817 }
3818 case Primitive::kPrimLong: {
3819 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003820 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003821 if (op->IsShl()) {
3822 __ shlq(first_reg, second_reg);
3823 } else if (op->IsShr()) {
3824 __ sarq(first_reg, second_reg);
3825 } else {
3826 __ shrq(first_reg, second_reg);
3827 }
3828 } else {
Nicolas Geoffray486cc192014-12-08 18:00:55 +00003829 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003830 if (op->IsShl()) {
3831 __ shlq(first_reg, imm);
3832 } else if (op->IsShr()) {
3833 __ sarq(first_reg, imm);
3834 } else {
3835 __ shrq(first_reg, imm);
3836 }
3837 }
3838 break;
3839 }
3840 default:
3841 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003842 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003843 }
3844}
3845
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003846void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3847 LocationSummary* locations =
3848 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3849
3850 switch (ror->GetResultType()) {
3851 case Primitive::kPrimInt:
3852 case Primitive::kPrimLong: {
3853 locations->SetInAt(0, Location::RequiresRegister());
3854 // The shift count needs to be in CL (unless it is a constant).
3855 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3856 locations->SetOut(Location::SameAsFirstInput());
3857 break;
3858 }
3859 default:
3860 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3861 UNREACHABLE();
3862 }
3863}
3864
3865void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3866 LocationSummary* locations = ror->GetLocations();
3867 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3868 Location second = locations->InAt(1);
3869
3870 switch (ror->GetResultType()) {
3871 case Primitive::kPrimInt:
3872 if (second.IsRegister()) {
3873 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3874 __ rorl(first_reg, second_reg);
3875 } else {
3876 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
3877 __ rorl(first_reg, imm);
3878 }
3879 break;
3880 case Primitive::kPrimLong:
3881 if (second.IsRegister()) {
3882 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3883 __ rorq(first_reg, second_reg);
3884 } else {
3885 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftValue);
3886 __ rorq(first_reg, imm);
3887 }
3888 break;
3889 default:
3890 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3891 UNREACHABLE();
3892 }
3893}
3894
Calin Juravle9aec02f2014-11-18 23:06:35 +00003895void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3896 HandleShift(shl);
3897}
3898
3899void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3900 HandleShift(shl);
3901}
3902
3903void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3904 HandleShift(shr);
3905}
3906
3907void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3908 HandleShift(shr);
3909}
3910
3911void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3912 HandleShift(ushr);
3913}
3914
3915void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3916 HandleShift(ushr);
3917}
3918
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003919void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003920 LocationSummary* locations =
3921 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003922 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003923 if (instruction->IsStringAlloc()) {
3924 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3925 } else {
3926 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3927 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3928 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003929 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003930}
3931
3932void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003933 // Note: if heap poisoning is enabled, the entry point takes care
3934 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003935 if (instruction->IsStringAlloc()) {
3936 // String is allocated through StringFactory. Call NewEmptyString entry point.
3937 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3938 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3939 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3940 __ call(Address(temp, code_offset.SizeValue()));
3941 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3942 } else {
3943 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3944 instruction,
3945 instruction->GetDexPc(),
3946 nullptr);
3947 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3948 DCHECK(!codegen_->IsLeafMethod());
3949 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003950}
3951
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003952void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3953 LocationSummary* locations =
3954 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3955 InvokeRuntimeCallingConvention calling_convention;
3956 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003957 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003958 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003959 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003960}
3961
3962void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3963 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003964 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3965 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003966 // Note: if heap poisoning is enabled, the entry point takes care
3967 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003968 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3969 instruction,
3970 instruction->GetDexPc(),
3971 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003972 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003973
3974 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003975}
3976
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003977void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003978 LocationSummary* locations =
3979 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003980 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3981 if (location.IsStackSlot()) {
3982 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3983 } else if (location.IsDoubleStackSlot()) {
3984 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3985 }
3986 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003987}
3988
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003989void InstructionCodeGeneratorX86_64::VisitParameterValue(
3990 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003991 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003992}
3993
3994void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3995 LocationSummary* locations =
3996 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3997 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3998}
3999
4000void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4001 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4002 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004003}
4004
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004005void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4006 LocationSummary* locations =
4007 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4008 locations->SetInAt(0, Location::RequiresRegister());
4009 locations->SetOut(Location::RequiresRegister());
4010}
4011
4012void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4013 LocationSummary* locations = instruction->GetLocations();
4014 uint32_t method_offset = 0;
4015 if (instruction->GetTableKind() == HClassTableGet::kVTable) {
4016 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4017 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
4018 } else {
4019 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
4020 instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
4021 }
4022 __ movq(locations->Out().AsRegister<CpuRegister>(),
4023 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
4024}
4025
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004026void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004027 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004028 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004029 locations->SetInAt(0, Location::RequiresRegister());
4030 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004031}
4032
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004033void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4034 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004035 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4036 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004037 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004038 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004039 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004040 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004041 break;
4042
4043 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004044 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004045 break;
4046
4047 default:
4048 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4049 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004050}
4051
David Brazdil66d126e2015-04-03 16:02:44 +01004052void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4053 LocationSummary* locations =
4054 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4055 locations->SetInAt(0, Location::RequiresRegister());
4056 locations->SetOut(Location::SameAsFirstInput());
4057}
4058
4059void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004060 LocationSummary* locations = bool_not->GetLocations();
4061 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4062 locations->Out().AsRegister<CpuRegister>().AsRegister());
4063 Location out = locations->Out();
4064 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4065}
4066
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004067void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004068 LocationSummary* locations =
4069 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004070 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
4071 locations->SetInAt(i, Location::Any());
4072 }
4073 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004074}
4075
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004076void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004077 LOG(FATAL) << "Unimplemented";
4078}
4079
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004080void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004081 /*
4082 * According to the JSR-133 Cookbook, on x86 only StoreLoad/AnyAny barriers need a memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004083 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004084 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4085 */
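  // Only kAnyAny (StoreLoad) therefore emits anything. MemoryFence() below is
  // assumed to produce a full fence (an mfence or an equivalent lock-prefixed
  // instruction); the exact encoding is a detail of the x86-64 assembler.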
4086 switch (kind) {
4087 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004088 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004089 break;
4090 }
4091 case MemBarrierKind::kAnyStore:
4092 case MemBarrierKind::kLoadAny:
4093 case MemBarrierKind::kStoreStore: {
4094 // nop
4095 break;
4096 }
4097 default:
4098 LOG(FATAL) << "Unexpected memory barrier " << kind;
4099 }
4100}
4101
4102void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4103 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4104
Roland Levillain0d5a2812015-11-13 10:07:31 +00004105 bool object_field_get_with_read_barrier =
4106 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004107 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004108 new (GetGraph()->GetArena()) LocationSummary(instruction,
4109 object_field_get_with_read_barrier ?
4110 LocationSummary::kCallOnSlowPath :
4111 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004112 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004113 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4114 locations->SetOut(Location::RequiresFpuRegister());
4115 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004116 // The output overlaps for an object field get when read barriers
4117 // are enabled: we do not want the move to overwrite the object's
4118 // location, as we need it to emit the read barrier.
4119 locations->SetOut(
4120 Location::RequiresRegister(),
4121 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004122 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004123 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4124 // We need a temporary register for the read barrier marking slow
4125 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4126 locations->AddTemp(Location::RequiresRegister());
4127 }
Calin Juravle52c48962014-12-16 17:02:57 +00004128}
4129
4130void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4131 const FieldInfo& field_info) {
4132 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4133
4134 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004135 Location base_loc = locations->InAt(0);
4136 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004137 Location out = locations->Out();
4138 bool is_volatile = field_info.IsVolatile();
4139 Primitive::Type field_type = field_info.GetFieldType();
4140 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4141
4142 switch (field_type) {
4143 case Primitive::kPrimBoolean: {
4144 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4145 break;
4146 }
4147
4148 case Primitive::kPrimByte: {
4149 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4150 break;
4151 }
4152
4153 case Primitive::kPrimShort: {
4154 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4155 break;
4156 }
4157
4158 case Primitive::kPrimChar: {
4159 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4160 break;
4161 }
4162
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004163 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004164 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4165 break;
4166 }
4167
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004168 case Primitive::kPrimNot: {
4169 // /* HeapReference<Object> */ out = *(base + offset)
4170 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4171 Location temp_loc = locations->GetTemp(0);
4172 // Note that a potential implicit null check is handled in this
4173 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4174 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4175 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4176 if (is_volatile) {
4177 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4178 }
4179 } else {
4180 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4181 codegen_->MaybeRecordImplicitNullCheck(instruction);
4182 if (is_volatile) {
4183 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4184 }
4185 // If read barriers are enabled, emit read barriers other than
4186 // Baker's using a slow path (and also unpoison the loaded
4187 // reference, if heap poisoning is enabled).
4188 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4189 }
4190 break;
4191 }
4192
Calin Juravle52c48962014-12-16 17:02:57 +00004193 case Primitive::kPrimLong: {
4194 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4195 break;
4196 }
4197
4198 case Primitive::kPrimFloat: {
4199 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4200 break;
4201 }
4202
4203 case Primitive::kPrimDouble: {
4204 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4205 break;
4206 }
4207
4208 case Primitive::kPrimVoid:
4209 LOG(FATAL) << "Unreachable type " << field_type;
4210 UNREACHABLE();
4211 }
4212
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004213 if (field_type == Primitive::kPrimNot) {
4214 // Potential implicit null checks, in the case of reference
4215 // fields, are handled in the previous switch statement.
4216 } else {
4217 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004218 }
Roland Levillain4d027112015-07-01 15:41:14 +01004219
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004220 if (is_volatile) {
4221 if (field_type == Primitive::kPrimNot) {
4222 // Memory barriers, in the case of references, are also handled
4223 // in the previous switch statement.
4224 } else {
4225 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4226 }
Roland Levillain4d027112015-07-01 15:41:14 +01004227 }
Calin Juravle52c48962014-12-16 17:02:57 +00004228}
4229
4230void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4231 const FieldInfo& field_info) {
4232 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4233
4234 LocationSummary* locations =
4235 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004236 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004237 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004238 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004239 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004240
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004241 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004242 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004243 if (is_volatile) {
4244 // In order to satisfy the semantics of volatile, this must be a single instruction store.
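      // (Otherwise a 64-bit constant may be stored as two 32-bit moves, as
      // MoveInt64ToAddress does further down, and another thread could observe
      // the field half-written.)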
4245 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4246 } else {
4247 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4248 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004249 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004250 if (is_volatile) {
4251 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4252 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4253 } else {
4254 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4255 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004256 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004257 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004258 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004259 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004260 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004261 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4262 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004263 locations->AddTemp(Location::RequiresRegister());
4264 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004265}
4266
Calin Juravle52c48962014-12-16 17:02:57 +00004267void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004268 const FieldInfo& field_info,
4269 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004270 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4271
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004272 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004273 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4274 Location value = locations->InAt(1);
4275 bool is_volatile = field_info.IsVolatile();
4276 Primitive::Type field_type = field_info.GetFieldType();
4277 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4278
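  // For a volatile store, the JSR-133 cookbook requires an AnyStore barrier
  // before the store and an AnyAny barrier after it. On x86-64 the AnyStore
  // below is a no-op; the AnyAny emitted at the end of this method is the one
  // that actually produces a fence.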
4279 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004280 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004281 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004282
Mark Mendellea5af682015-10-22 17:35:49 -04004283 bool maybe_record_implicit_null_check_done = false;
4284
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004285 switch (field_type) {
4286 case Primitive::kPrimBoolean:
4287 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004288 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004289 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004290 __ movb(Address(base, offset), Immediate(v));
4291 } else {
4292 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4293 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004294 break;
4295 }
4296
4297 case Primitive::kPrimShort:
4298 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004299 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004300 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004301 __ movw(Address(base, offset), Immediate(v));
4302 } else {
4303 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4304 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004305 break;
4306 }
4307
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004308 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004309 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004310 if (value.IsConstant()) {
4311 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004312 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4313 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4314 // Note: if heap poisoning is enabled, no need to poison
4315 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004316 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004317 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004318 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4319 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4320 __ movl(temp, value.AsRegister<CpuRegister>());
4321 __ PoisonHeapReference(temp);
4322 __ movl(Address(base, offset), temp);
4323 } else {
4324 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4325 }
Mark Mendell40741f32015-04-20 22:10:34 -04004326 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004327 break;
4328 }
4329
4330 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004331 if (value.IsConstant()) {
4332 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004333 codegen_->MoveInt64ToAddress(Address(base, offset),
4334 Address(base, offset + sizeof(int32_t)),
4335 v,
4336 instruction);
4337 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004338 } else {
4339 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4340 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004341 break;
4342 }
4343
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004344 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004345 if (value.IsConstant()) {
4346 int32_t v =
4347 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4348 __ movl(Address(base, offset), Immediate(v));
4349 } else {
4350 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4351 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004352 break;
4353 }
4354
4355 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004356 if (value.IsConstant()) {
4357 int64_t v =
4358 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4359 codegen_->MoveInt64ToAddress(Address(base, offset),
4360 Address(base, offset + sizeof(int32_t)),
4361 v,
4362 instruction);
4363 maybe_record_implicit_null_check_done = true;
4364 } else {
4365 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4366 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004367 break;
4368 }
4369
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004370 case Primitive::kPrimVoid:
4371 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004372 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004373 }
Calin Juravle52c48962014-12-16 17:02:57 +00004374
Mark Mendellea5af682015-10-22 17:35:49 -04004375 if (!maybe_record_implicit_null_check_done) {
4376 codegen_->MaybeRecordImplicitNullCheck(instruction);
4377 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004378
4379 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4380 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4381 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004382 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004383 }
4384
Calin Juravle52c48962014-12-16 17:02:57 +00004385 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004386 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004387 }
4388}
4389
4390void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4391 HandleFieldSet(instruction, instruction->GetFieldInfo());
4392}
4393
4394void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004395 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004396}
4397
4398void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004399 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004400}
4401
4402void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004403 HandleFieldGet(instruction, instruction->GetFieldInfo());
4404}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004405
Calin Juravle52c48962014-12-16 17:02:57 +00004406void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4407 HandleFieldGet(instruction);
4408}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004409
Calin Juravle52c48962014-12-16 17:02:57 +00004410void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4411 HandleFieldGet(instruction, instruction->GetFieldInfo());
4412}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004413
Calin Juravle52c48962014-12-16 17:02:57 +00004414void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4415 HandleFieldSet(instruction, instruction->GetFieldInfo());
4416}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004417
Calin Juravle52c48962014-12-16 17:02:57 +00004418void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004419 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004420}
4421
Calin Juravlee460d1d2015-09-29 04:52:17 +01004422void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4423 HUnresolvedInstanceFieldGet* instruction) {
4424 FieldAccessCallingConventionX86_64 calling_convention;
4425 codegen_->CreateUnresolvedFieldLocationSummary(
4426 instruction, instruction->GetFieldType(), calling_convention);
4427}
4428
4429void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4430 HUnresolvedInstanceFieldGet* instruction) {
4431 FieldAccessCallingConventionX86_64 calling_convention;
4432 codegen_->GenerateUnresolvedFieldAccess(instruction,
4433 instruction->GetFieldType(),
4434 instruction->GetFieldIndex(),
4435 instruction->GetDexPc(),
4436 calling_convention);
4437}
4438
4439void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4440 HUnresolvedInstanceFieldSet* instruction) {
4441 FieldAccessCallingConventionX86_64 calling_convention;
4442 codegen_->CreateUnresolvedFieldLocationSummary(
4443 instruction, instruction->GetFieldType(), calling_convention);
4444}
4445
4446void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4447 HUnresolvedInstanceFieldSet* instruction) {
4448 FieldAccessCallingConventionX86_64 calling_convention;
4449 codegen_->GenerateUnresolvedFieldAccess(instruction,
4450 instruction->GetFieldType(),
4451 instruction->GetFieldIndex(),
4452 instruction->GetDexPc(),
4453 calling_convention);
4454}
4455
4456void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4457 HUnresolvedStaticFieldGet* instruction) {
4458 FieldAccessCallingConventionX86_64 calling_convention;
4459 codegen_->CreateUnresolvedFieldLocationSummary(
4460 instruction, instruction->GetFieldType(), calling_convention);
4461}
4462
4463void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4464 HUnresolvedStaticFieldGet* instruction) {
4465 FieldAccessCallingConventionX86_64 calling_convention;
4466 codegen_->GenerateUnresolvedFieldAccess(instruction,
4467 instruction->GetFieldType(),
4468 instruction->GetFieldIndex(),
4469 instruction->GetDexPc(),
4470 calling_convention);
4471}
4472
4473void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4474 HUnresolvedStaticFieldSet* instruction) {
4475 FieldAccessCallingConventionX86_64 calling_convention;
4476 codegen_->CreateUnresolvedFieldLocationSummary(
4477 instruction, instruction->GetFieldType(), calling_convention);
4478}
4479
4480void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4481 HUnresolvedStaticFieldSet* instruction) {
4482 FieldAccessCallingConventionX86_64 calling_convention;
4483 codegen_->GenerateUnresolvedFieldAccess(instruction,
4484 instruction->GetFieldType(),
4485 instruction->GetFieldIndex(),
4486 instruction->GetDexPc(),
4487 calling_convention);
4488}
4489
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004490void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004491 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4492 ? LocationSummary::kCallOnSlowPath
4493 : LocationSummary::kNoCall;
4494 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4495 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004496 ? Location::RequiresRegister()
4497 : Location::Any();
4498 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004499 if (instruction->HasUses()) {
4500 locations->SetOut(Location::SameAsFirstInput());
4501 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004502}
4503
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004504void InstructionCodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004505 if (codegen_->CanMoveNullCheckToUser(instruction)) {
4506 return;
4507 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004508 LocationSummary* locations = instruction->GetLocations();
4509 Location obj = locations->InAt(0);
4510
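  // Implicit null check: the `testl` reads from the object at offset 0, so a null `obj`
  // faults and the signal handler converts the SIGSEGV into a NullPointerException.
  // Only the flags are set; the AND result is discarded, so the use of RAX is arbitrary.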
4511 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
4512 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4513}
4514
4515void InstructionCodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004516 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004517 codegen_->AddSlowPath(slow_path);
4518
4519 LocationSummary* locations = instruction->GetLocations();
4520 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004521
4522 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004523 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004524 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004525 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004526 } else {
4527 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004528 DCHECK(obj.GetConstant()->IsNullConstant());
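    // A constant object can only be the null constant here, so the check always fails:
    // jump straight to the slow path.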
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004529 __ jmp(slow_path->GetEntryLabel());
4530 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004531 }
4532 __ j(kEqual, slow_path->GetEntryLabel());
4533}
4534
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004535void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004536 if (codegen_->IsImplicitNullCheckAllowed(instruction)) {
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004537 GenerateImplicitNullCheck(instruction);
4538 } else {
4539 GenerateExplicitNullCheck(instruction);
4540 }
4541}
4542
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004543void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004544 bool object_array_get_with_read_barrier =
4545 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004546 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004547 new (GetGraph()->GetArena()) LocationSummary(instruction,
4548 object_array_get_with_read_barrier ?
4549 LocationSummary::kCallOnSlowPath :
4550 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004551 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004552 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004553 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4554 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4555 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004556 // The output overlaps for an object array get when read barriers
4557 // are enabled: we do not want the move to overwrite the array's
4558 // location, as we need it to emit the read barrier.
4559 locations->SetOut(
4560 Location::RequiresRegister(),
4561 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004562 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004563 // We need a temporary register for the read barrier marking slow
4564 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4565 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4566 locations->AddTemp(Location::RequiresRegister());
4567 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004568}
4569
4570void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4571 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004572 Location obj_loc = locations->InAt(0);
4573 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004574 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004575 Location out_loc = locations->Out();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004576
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004577 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004578 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004579 case Primitive::kPrimBoolean: {
4580 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004581 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004582 if (index.IsConstant()) {
4583 __ movzxb(out, Address(obj,
4584 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4585 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004586 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004587 }
4588 break;
4589 }
4590
4591 case Primitive::kPrimByte: {
4592 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004593 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004594 if (index.IsConstant()) {
4595 __ movsxb(out, Address(obj,
4596 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4597 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004598 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004599 }
4600 break;
4601 }
4602
4603 case Primitive::kPrimShort: {
4604 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004605 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004606 if (index.IsConstant()) {
4607 __ movsxw(out, Address(obj,
4608 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4609 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004610 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004611 }
4612 break;
4613 }
4614
4615 case Primitive::kPrimChar: {
4616 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004617 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004618 if (index.IsConstant()) {
4619 __ movzxw(out, Address(obj,
4620 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4621 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004622 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004623 }
4624 break;
4625 }
4626
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004627 case Primitive::kPrimInt: {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004628 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004629 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004630 if (index.IsConstant()) {
4631 __ movl(out, Address(obj,
4632 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4633 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004634 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004635 }
4636 break;
4637 }
4638
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004639 case Primitive::kPrimNot: {
4640 static_assert(
4641 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4642 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4643 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4644 // /* HeapReference<Object> */ out =
4645 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4646 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4647 Location temp = locations->GetTemp(0);
4648 // Note that a potential implicit null check is handled in this
4649        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4650 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4651 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4652 } else {
4653 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4654 if (index.IsConstant()) {
4655 uint32_t offset =
4656 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4657 __ movl(out, Address(obj, offset));
4658 codegen_->MaybeRecordImplicitNullCheck(instruction);
4659 // If read barriers are enabled, emit read barriers other than
4660 // Baker's using a slow path (and also unpoison the loaded
4661 // reference, if heap poisoning is enabled).
4662 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4663 } else {
4664 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4665 codegen_->MaybeRecordImplicitNullCheck(instruction);
4666 // If read barriers are enabled, emit read barriers other than
4667 // Baker's using a slow path (and also unpoison the loaded
4668 // reference, if heap poisoning is enabled).
4669 codegen_->MaybeGenerateReadBarrierSlow(
4670 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4671 }
4672 }
4673 break;
4674 }
4675
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004676 case Primitive::kPrimLong: {
4677 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004678 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004679 if (index.IsConstant()) {
4680 __ movq(out, Address(obj,
4681 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4682 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004683 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004684 }
4685 break;
4686 }
4687
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004688 case Primitive::kPrimFloat: {
4689 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004690 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004691 if (index.IsConstant()) {
4692 __ movss(out, Address(obj,
4693 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4694 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004695 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004696 }
4697 break;
4698 }
4699
4700 case Primitive::kPrimDouble: {
4701 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004702 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004703 if (index.IsConstant()) {
4704 __ movsd(out, Address(obj,
4705 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4706 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004707 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004708 }
4709 break;
4710 }
4711
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004712 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004713 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004714 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004715 }
Roland Levillain4d027112015-07-01 15:41:14 +01004716
4717 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004718 // Potential implicit null checks, in the case of reference
4719 // arrays, are handled in the previous switch statement.
4720 } else {
4721 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004722 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004723}
4724
4725void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004726 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004727
4728 bool needs_write_barrier =
4729 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004730 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004731 bool object_array_set_with_read_barrier =
4732 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004733
Nicolas Geoffray39468442014-09-02 15:17:15 +01004734 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004735 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004736 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004737 LocationSummary::kCallOnSlowPath :
4738 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004739
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004740 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004741 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4742 if (Primitive::IsFloatingPointType(value_type)) {
4743 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004744 } else {
4745 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4746 }
4747
4748 if (needs_write_barrier) {
4749 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004750
4751 // This first temporary register is possibly used for heap
4752 // reference poisoning and/or read barrier emission too.
4753 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004754 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004755 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004756}
4757
4758void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4759 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004760 Location array_loc = locations->InAt(0);
4761 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004762 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004763 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004764 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004765 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004766 bool needs_write_barrier =
4767 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004768 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4769 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4770 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004771
4772 switch (value_type) {
4773 case Primitive::kPrimBoolean:
4774 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004775 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4776 Address address = index.IsConstant()
4777 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4778 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4779 if (value.IsRegister()) {
4780 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004781 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004782 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004783 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004784 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004785 break;
4786 }
4787
4788 case Primitive::kPrimShort:
4789 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004790 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4791 Address address = index.IsConstant()
4792 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4793 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4794 if (value.IsRegister()) {
4795 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004796 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004797 DCHECK(value.IsConstant()) << value;
4798 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004799 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004800 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004801 break;
4802 }
4803
4804 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004805 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4806 Address address = index.IsConstant()
4807 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4808 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004809
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004810 if (!value.IsRegister()) {
4811 // Just setting null.
4812 DCHECK(instruction->InputAt(2)->IsNullConstant());
4813 DCHECK(value.IsConstant()) << value;
4814 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004815 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004816 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004817 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004818 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004819 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004820
4821 DCHECK(needs_write_barrier);
4822 CpuRegister register_value = value.AsRegister<CpuRegister>();
4823 NearLabel done, not_null, do_put;
4824 SlowPathCode* slow_path = nullptr;
4825 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
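      // Type check for the array store: the fast path accepts a value whose class equals
      // the array's component type; for an array statically typed as Object[], a component
      // type whose super class is null (i.e. Object itself) also accepts any value. All
      // other cases are delegated to ArraySetSlowPathX86_64, which calls the runtime.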
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004826 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004827 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4828 codegen_->AddSlowPath(slow_path);
4829 if (instruction->GetValueCanBeNull()) {
4830 __ testl(register_value, register_value);
4831 __ j(kNotEqual, &not_null);
4832 __ movl(address, Immediate(0));
4833 codegen_->MaybeRecordImplicitNullCheck(instruction);
4834 __ jmp(&done);
4835 __ Bind(&not_null);
4836 }
4837
Roland Levillain0d5a2812015-11-13 10:07:31 +00004838 if (kEmitCompilerReadBarrier) {
4839 // When read barriers are enabled, the type checking
4840 // instrumentation requires two read barriers:
4841 //
4842 // __ movl(temp2, temp);
4843 // // /* HeapReference<Class> */ temp = temp->component_type_
4844 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004845 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004846 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4847 //
4848 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4849 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004850 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004851 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4852 //
4853 // __ cmpl(temp, temp2);
4854 //
4855 // However, the second read barrier may trash `temp`, as it
4856 // is a temporary register, and as such would not be saved
4857 // along with live registers before calling the runtime (nor
4858 // restored afterwards). So in this case, we bail out and
4859 // delegate the work to the array set slow path.
4860 //
4861 // TODO: Extend the register allocator to support a new
4862 // "(locally) live temp" location so as to avoid always
4863 // going into the slow path when read barriers are enabled.
4864 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004865 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004866 // /* HeapReference<Class> */ temp = array->klass_
4867 __ movl(temp, Address(array, class_offset));
4868 codegen_->MaybeRecordImplicitNullCheck(instruction);
4869 __ MaybeUnpoisonHeapReference(temp);
4870
4871 // /* HeapReference<Class> */ temp = temp->component_type_
4872 __ movl(temp, Address(temp, component_offset));
4873 // If heap poisoning is enabled, no need to unpoison `temp`
4874 // nor the object reference in `register_value->klass`, as
4875 // we are comparing two poisoned references.
4876 __ cmpl(temp, Address(register_value, class_offset));
4877
4878 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4879 __ j(kEqual, &do_put);
4880 // If heap poisoning is enabled, the `temp` reference has
4881 // not been unpoisoned yet; unpoison it now.
4882 __ MaybeUnpoisonHeapReference(temp);
4883
4884 // /* HeapReference<Class> */ temp = temp->super_class_
4885 __ movl(temp, Address(temp, super_offset));
4886 // If heap poisoning is enabled, no need to unpoison
4887 // `temp`, as we are comparing against null below.
4888 __ testl(temp, temp);
4889 __ j(kNotEqual, slow_path->GetEntryLabel());
4890 __ Bind(&do_put);
4891 } else {
4892 __ j(kNotEqual, slow_path->GetEntryLabel());
4893 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004894 }
4895 }
4896
4897 if (kPoisonHeapReferences) {
4898 __ movl(temp, register_value);
4899 __ PoisonHeapReference(temp);
4900 __ movl(address, temp);
4901 } else {
4902 __ movl(address, register_value);
4903 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004904 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004905 codegen_->MaybeRecordImplicitNullCheck(instruction);
4906 }
4907
4908 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4909 codegen_->MarkGCCard(
4910 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4911 __ Bind(&done);
4912
4913 if (slow_path != nullptr) {
4914 __ Bind(slow_path->GetExitLabel());
4915 }
4916
4917 break;
4918 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004919
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004920 case Primitive::kPrimInt: {
4921 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4922 Address address = index.IsConstant()
4923 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4924 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4925 if (value.IsRegister()) {
4926 __ movl(address, value.AsRegister<CpuRegister>());
4927 } else {
4928 DCHECK(value.IsConstant()) << value;
4929 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4930 __ movl(address, Immediate(v));
4931 }
4932 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004933 break;
4934 }
4935
4936 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004937 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4938 Address address = index.IsConstant()
4939 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4940 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4941 if (value.IsRegister()) {
4942 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004943 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004944 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004945 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004946 Address address_high = index.IsConstant()
4947 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4948 offset + sizeof(int32_t))
4949 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4950 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004951 }
4952 break;
4953 }
4954
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004955 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004956 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4957 Address address = index.IsConstant()
4958 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4959 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004960 if (value.IsFpuRegister()) {
4961 __ movss(address, value.AsFpuRegister<XmmRegister>());
4962 } else {
4963 DCHECK(value.IsConstant());
4964 int32_t v =
4965 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4966 __ movl(address, Immediate(v));
4967 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004968 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004969 break;
4970 }
4971
4972 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004973 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4974 Address address = index.IsConstant()
4975 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4976 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004977 if (value.IsFpuRegister()) {
4978 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4979 codegen_->MaybeRecordImplicitNullCheck(instruction);
4980 } else {
4981 int64_t v =
4982 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4983 Address address_high = index.IsConstant()
4984 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4985 offset + sizeof(int32_t))
4986 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4987 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4988 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004989 break;
4990 }
4991
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004992 case Primitive::kPrimVoid:
4993 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004994 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004995 }
4996}
4997
4998void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004999 LocationSummary* locations =
5000 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005001 locations->SetInAt(0, Location::RequiresRegister());
5002 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005003}
5004
5005void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
5006 LocationSummary* locations = instruction->GetLocations();
5007 uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
Roland Levillain271ab9c2014-11-27 15:23:57 +00005008 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5009 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005010 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005011 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005012}
5013
5014void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005015 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5016 ? LocationSummary::kCallOnSlowPath
5017 : LocationSummary::kNoCall;
5018 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005019 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04005020 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005021 if (instruction->HasUses()) {
5022 locations->SetOut(Location::SameAsFirstInput());
5023 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005024}
5025
5026void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5027 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005028 Location index_loc = locations->InAt(0);
5029 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005030 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005031 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005032
Mark Mendell99dbd682015-04-22 16:18:52 -04005033 if (length_loc.IsConstant()) {
5034 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5035 if (index_loc.IsConstant()) {
5036      // BCE will remove the bounds check if we are guaranteed to pass.
5037 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5038 if (index < 0 || index >= length) {
5039 codegen_->AddSlowPath(slow_path);
5040 __ jmp(slow_path->GetEntryLabel());
5041 } else {
5042 // Some optimization after BCE may have generated this, and we should not
5043 // generate a bounds check if it is a valid range.
5044 }
5045 return;
5046 }
5047
5048 // We have to reverse the jump condition because the length is the constant.
5049 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5050 __ cmpl(index_reg, Immediate(length));
5051 codegen_->AddSlowPath(slow_path);
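    // The unsigned comparison also catches a negative index, which compares as a value
    // well above any valid length.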
5052 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005053 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005054 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5055 if (index_loc.IsConstant()) {
5056 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5057 __ cmpl(length, Immediate(value));
5058 } else {
5059 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5060 }
5061 codegen_->AddSlowPath(slow_path);
5062 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005063 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005064}
5065
5066void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5067 CpuRegister card,
5068 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005069 CpuRegister value,
5070 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005071 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005072 if (value_can_be_null) {
5073 __ testl(value, value);
5074 __ j(kEqual, &is_null);
5075 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005076 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5077 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005078 __ movq(temp, object);
5079 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
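  // The card table base is biased so that its least-significant byte equals the dirty-card
  // value; storing that byte at `card + (object >> kCardShift)` marks the object's card dirty.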
Roland Levillain4d027112015-07-01 15:41:14 +01005080 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005081 if (value_can_be_null) {
5082 __ Bind(&is_null);
5083 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005084}
5085
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005086void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005087 LOG(FATAL) << "Unimplemented";
5088}
5089
5090void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005091 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5092}
5093
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005094void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5095 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5096}
5097
5098void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005099 HBasicBlock* block = instruction->GetBlock();
5100 if (block->GetLoopInformation() != nullptr) {
5101 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5102 // The back edge will generate the suspend check.
5103 return;
5104 }
5105 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5106 // The goto will generate the suspend check.
5107 return;
5108 }
5109 GenerateSuspendCheck(instruction, nullptr);
5110}
5111
5112void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5113 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005114 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005115 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5116 if (slow_path == nullptr) {
5117 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5118 instruction->SetSlowPath(slow_path);
5119 codegen_->AddSlowPath(slow_path);
5120 if (successor != nullptr) {
5121 DCHECK(successor->IsLoopHeader());
5122 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5123 }
5124 } else {
5125 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5126 }
5127
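  // Poll the 16-bit thread flags (suspend and checkpoint requests) in the current Thread,
  // addressed through the GS segment register; any non-zero flag takes the slow path.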
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005128 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5129 /* no_rip */ true),
5130 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005131 if (successor == nullptr) {
5132 __ j(kNotEqual, slow_path->GetEntryLabel());
5133 __ Bind(slow_path->GetReturnLabel());
5134 } else {
5135 __ j(kEqual, codegen_->GetLabelOf(successor));
5136 __ jmp(slow_path->GetEntryLabel());
5137 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005138}
5139
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005140X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5141 return codegen_->GetAssembler();
5142}
5143
5144void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005145 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005146 Location source = move->GetSource();
5147 Location destination = move->GetDestination();
5148
5149 if (source.IsRegister()) {
5150 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005151 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005152 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005153 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005154 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005155 } else {
5156 DCHECK(destination.IsDoubleStackSlot());
5157 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005158 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005159 }
5160 } else if (source.IsStackSlot()) {
5161 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005162 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005163 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005164 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005165 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005166 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005167 } else {
5168 DCHECK(destination.IsStackSlot());
5169 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5170 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5171 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005172 } else if (source.IsDoubleStackSlot()) {
5173 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005174 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005175 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005176 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005177 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5178 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005179 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005180 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005181 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5182 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5183 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005184 } else if (source.IsConstant()) {
5185 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005186 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5187 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005188 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005189 if (value == 0) {
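        // XOR of a register with itself is the shortest encoding for loading zero.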
5190 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5191 } else {
5192 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5193 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005194 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005195 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005196 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005197 }
5198 } else if (constant->IsLongConstant()) {
5199 int64_t value = constant->AsLongConstant()->GetValue();
5200 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005201 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005202 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005203 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005204 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005205 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005206 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005207 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005208 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005209 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005210 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005211 } else {
5212 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005213 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005214 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5215 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005216 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005217 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005218 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005219 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005220 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005221 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005222 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005223 } else {
5224 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005225 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005226 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005227 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005228 } else if (source.IsFpuRegister()) {
5229 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005230 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005231 } else if (destination.IsStackSlot()) {
5232 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005233 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005234 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005235 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005236 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005237 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005238 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005239 }
5240}
5241
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005242void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005243 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005244 __ movl(Address(CpuRegister(RSP), mem), reg);
5245 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005246}
5247
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005248void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005249 ScratchRegisterScope ensure_scratch(
5250 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5251
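  // If the scratch register had to be spilled, RSP moved down by one word; compensate
  // when addressing the two stack slots.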
5252 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5253 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5254 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5255 Address(CpuRegister(RSP), mem2 + stack_offset));
5256 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5257 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5258 CpuRegister(ensure_scratch.GetRegister()));
5259}
5260
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005261void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5262 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5263 __ movq(Address(CpuRegister(RSP), mem), reg);
5264 __ movq(reg, CpuRegister(TMP));
5265}
5266
5267void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5268 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005269 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005270
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005271 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5272 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5273 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5274 Address(CpuRegister(RSP), mem2 + stack_offset));
5275 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5276 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5277 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005278}
5279
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005280void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5281 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5282 __ movss(Address(CpuRegister(RSP), mem), reg);
5283 __ movd(reg, CpuRegister(TMP));
5284}
5285
5286void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5287 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5288 __ movsd(Address(CpuRegister(RSP), mem), reg);
5289 __ movd(reg, CpuRegister(TMP));
5290}
5291
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005292void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005293 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005294 Location source = move->GetSource();
5295 Location destination = move->GetDestination();
5296
5297 if (source.IsRegister() && destination.IsRegister()) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005298 __ xchgq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005299 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005300 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005301 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005302 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005303 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005304 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5305 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005306 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005307 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005308 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005309 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5310 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005311 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005312 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5313 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5314 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005315 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005316 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005317 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005318 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005319 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005320 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005321 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005322 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005323 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005324 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005325 }
5326}
5327
5328
5329void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5330 __ pushq(CpuRegister(reg));
5331}
5332
5333
5334void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5335 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005336}
5337
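// The check emitted below is roughly (a sketch; the real offset comes from
// mirror::Class::StatusOffset()):
//
//   cmpl [class_reg + status_offset], kStatusInitialized
//   jl   <LoadClassSlowPathX86_64>   // runs <clinit>, then jumps back to exit
//  exit: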
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005338void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005339 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005340 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5341 Immediate(mirror::Class::kStatusInitialized));
5342 __ j(kLess, slow_path->GetEntryLabel());
5343 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005344 // No need for a memory fence: x86-64 loads have acquire semantics, so the fields written by the class initializer are visible once the status check succeeds.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005345}
5346
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005347void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01005348 InvokeRuntimeCallingConvention calling_convention;
5349 CodeGenerator::CreateLoadClassLocationSummary(
5350 cls,
5351 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Roland Levillain0d5a2812015-11-13 10:07:31 +00005352 Location::RegisterLocation(RAX),
5353 /* code_generator_supports_read_barrier */ true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005354}
5355
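// Without read barriers, the code emitted below for a non-referrer class is roughly
// (a sketch; the 4-byte scaling reflects the size of a GcRoot<> entry):
//
//   movq  out, [current_method + dex_cache_resolved_types_offset]
//   movl  out, [out + 4 * type_index]
//   testl out, out                      // only if the class may not be in the dex cache
//   je    <LoadClassSlowPathX86_64>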
5356void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005357 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005358 if (cls->NeedsAccessCheck()) {
5359 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5360 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5361 cls,
5362 cls->GetDexPc(),
5363 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005364 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005365 return;
5366 }
5367
Roland Levillain0d5a2812015-11-13 10:07:31 +00005368 Location out_loc = locations->Out();
5369 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Calin Juravle580b6092015-10-06 17:35:58 +01005370 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005371
Calin Juravle580b6092015-10-06 17:35:58 +01005372 if (cls->IsReferrersClass()) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005373 DCHECK(!cls->CanCallRuntime());
5374 DCHECK(!cls->MustGenerateClinitCheck());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005375 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5376 GenerateGcRootFieldLoad(
5377 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005378 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005379 // /* GcRoot<mirror::Class>[] */ out =
5380 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5381 __ movq(out, Address(current_method,
5382 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005383 // /* GcRoot<mirror::Class> */ out = out[type_index]
5384 GenerateGcRootFieldLoad(cls, out_loc, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
Roland Levillain4d027112015-07-01 15:41:14 +01005385
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005386 if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
5387 DCHECK(cls->CanCallRuntime());
5388 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5389 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5390 codegen_->AddSlowPath(slow_path);
5391 if (!cls->IsInDexCache()) {
5392 __ testl(out, out);
5393 __ j(kEqual, slow_path->GetEntryLabel());
5394 }
5395 if (cls->MustGenerateClinitCheck()) {
5396 GenerateClassInitializationCheck(slow_path, out);
5397 } else {
5398 __ Bind(slow_path->GetExitLabel());
5399 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005400 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005401 }
5402}
5403
5404void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5405 LocationSummary* locations =
5406 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5407 locations->SetInAt(0, Location::RequiresRegister());
5408 if (check->HasUses()) {
5409 locations->SetOut(Location::SameAsFirstInput());
5410 }
5411}
5412
5413void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005414 // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005415 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005416 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005417 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005418 GenerateClassInitializationCheck(slow_path,
5419 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005420}
5421
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005422void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005423 LocationSummary::CallKind call_kind = (!load->IsInDexCache() || kEmitCompilerReadBarrier)
5424 ? LocationSummary::kCallOnSlowPath
5425 : LocationSummary::kNoCall;
5426 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005427 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005428 locations->SetOut(Location::RequiresRegister());
5429}
5430
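// Without read barriers, the sequence emitted below is roughly (a sketch):
//
//   movl  out, [current_method + declaring_class_offset]
//   movq  out, [out + dex_cache_strings_offset]
//   movl  out, [out + 4 * string_index]
//   testl out, out                      // only if the string may not be in the dex cache
//   je    <LoadStringSlowPathX86_64>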
5431void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005432 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005433 Location out_loc = locations->Out();
5434 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005435 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005436
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005437 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5438 GenerateGcRootFieldLoad(
5439 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005440 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5441 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005442 // /* GcRoot<mirror::String> */ out = out[string_index]
5443 GenerateGcRootFieldLoad(
5444 load, out_loc, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005445
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005446 if (!load->IsInDexCache()) {
5447 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5448 codegen_->AddSlowPath(slow_path);
5449 __ testl(out, out);
5450 __ j(kEqual, slow_path->GetEntryLabel());
5451 __ Bind(slow_path->GetExitLabel());
5452 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005453}
5454
David Brazdilcb1c0552015-08-04 16:22:25 +01005455static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005456 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5457 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005458}
5459
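// The pending exception lives in the Thread object, which is addressed below through the
// %gs segment register; the /* no_rip */ argument selects an absolute (segment-relative)
// displacement instead of RIP-relative addressing.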
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005460void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5461 LocationSummary* locations =
5462 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5463 locations->SetOut(Location::RequiresRegister());
5464}
5465
5466void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005467 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5468}
5469
5470void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5471 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5472}
5473
5474void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5475 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005476}
5477
5478void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5479 LocationSummary* locations =
5480 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5481 InvokeRuntimeCallingConvention calling_convention;
5482 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5483}
5484
5485void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005486 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5487 instruction,
5488 instruction->GetDexPc(),
5489 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005490 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005491}
5492
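// A type check needs an extra temporary register only when read barriers are in use: with
// Baker read barriers the temporary receives the object's lock word (see
// GenerateReferenceLoadWithBakerReadBarrier), and with slow-path-based read barriers it
// preserves the reference that the read barrier slow path needs as an input (see
// GenerateReferenceLoadOneRegister).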
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005493static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5494 return kEmitCompilerReadBarrier &&
5495 (kUseBakerReadBarrier ||
5496 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5497 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5498 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5499}
5500
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005501void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005502 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005503 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5504 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005505 case TypeCheckKind::kExactCheck:
5506 case TypeCheckKind::kAbstractClassCheck:
5507 case TypeCheckKind::kClassHierarchyCheck:
5508 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005509 call_kind =
5510 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005511 break;
5512 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005513 case TypeCheckKind::kUnresolvedCheck:
5514 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005515 call_kind = LocationSummary::kCallOnSlowPath;
5516 break;
5517 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005518
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005519 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005520 locations->SetInAt(0, Location::RequiresRegister());
5521 locations->SetInAt(1, Location::Any());
5522 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5523 locations->SetOut(Location::RequiresRegister());
5524 // When read barriers are enabled, we need a temporary register for
5525 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005526 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005527 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005528 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005529}
5530
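// For the simplest case (kExactCheck, no read barriers, null check required), the code
// emitted below boils down to (a sketch):
//
//   testl obj, obj
//   je    zero                          // null is never an instance of anything
//   movl  out, [obj + class_offset]
//   cmpl  out, cls
//   jne   zero
//   movl  out, 1
//   jmp   done
//  zero:
//   xorl  out, out
//  done: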
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005531void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005532 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005533 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005534 Location obj_loc = locations->InAt(0);
5535 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005536 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005537 Location out_loc = locations->Out();
5538 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005539 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005540 locations->GetTemp(0) :
5541 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005542 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005543 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5544 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5545 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005546 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005547 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005548
5549 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005550 // Avoid null check if we know obj is not null.
5551 if (instruction->MustDoNullCheck()) {
5552 __ testl(obj, obj);
5553 __ j(kEqual, &zero);
5554 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005555
Roland Levillain0d5a2812015-11-13 10:07:31 +00005556 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005557 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005558
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005559 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005560 case TypeCheckKind::kExactCheck: {
5561 if (cls.IsRegister()) {
5562 __ cmpl(out, cls.AsRegister<CpuRegister>());
5563 } else {
5564 DCHECK(cls.IsStackSlot()) << cls;
5565 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5566 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005567 if (zero.IsLinked()) {
5568 // Classes must be equal for the instanceof to succeed.
5569 __ j(kNotEqual, &zero);
5570 __ movl(out, Immediate(1));
5571 __ jmp(&done);
5572 } else {
5573 __ setcc(kEqual, out);
5574 // setcc only sets the low byte.
5575 __ andl(out, Immediate(1));
5576 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005577 break;
5578 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005579
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005580 case TypeCheckKind::kAbstractClassCheck: {
5581 // If the class is abstract, we eagerly fetch the super class of the
5582 // object to avoid doing a comparison we know will fail.
5583 NearLabel loop, success;
5584 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005585 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005586 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005587 __ testl(out, out);
5588 // If `out` is null, we use it for the result, and jump to `done`.
5589 __ j(kEqual, &done);
5590 if (cls.IsRegister()) {
5591 __ cmpl(out, cls.AsRegister<CpuRegister>());
5592 } else {
5593 DCHECK(cls.IsStackSlot()) << cls;
5594 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5595 }
5596 __ j(kNotEqual, &loop);
5597 __ movl(out, Immediate(1));
5598 if (zero.IsLinked()) {
5599 __ jmp(&done);
5600 }
5601 break;
5602 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005603
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005604 case TypeCheckKind::kClassHierarchyCheck: {
5605 // Walk over the class hierarchy to find a match.
5606 NearLabel loop, success;
5607 __ Bind(&loop);
5608 if (cls.IsRegister()) {
5609 __ cmpl(out, cls.AsRegister<CpuRegister>());
5610 } else {
5611 DCHECK(cls.IsStackSlot()) << cls;
5612 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5613 }
5614 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005615 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005616 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005617 __ testl(out, out);
5618 __ j(kNotEqual, &loop);
5619 // If `out` is null, we use it for the result, and jump to `done`.
5620 __ jmp(&done);
5621 __ Bind(&success);
5622 __ movl(out, Immediate(1));
5623 if (zero.IsLinked()) {
5624 __ jmp(&done);
5625 }
5626 break;
5627 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005628
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005629 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005630 // Do an exact check.
5631 NearLabel exact_check;
5632 if (cls.IsRegister()) {
5633 __ cmpl(out, cls.AsRegister<CpuRegister>());
5634 } else {
5635 DCHECK(cls.IsStackSlot()) << cls;
5636 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5637 }
5638 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005639 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005640 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005641 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005642 __ testl(out, out);
5643 // If `out` is null, we use it for the result, and jump to `done`.
5644 __ j(kEqual, &done);
5645 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5646 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005647 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005648 __ movl(out, Immediate(1));
5649 __ jmp(&done);
5650 break;
5651 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005652
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005653 case TypeCheckKind::kArrayCheck: {
5654 if (cls.IsRegister()) {
5655 __ cmpl(out, cls.AsRegister<CpuRegister>());
5656 } else {
5657 DCHECK(cls.IsStackSlot()) << cls;
5658 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5659 }
5660 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005661 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5662 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005663 codegen_->AddSlowPath(slow_path);
5664 __ j(kNotEqual, slow_path->GetEntryLabel());
5665 __ movl(out, Immediate(1));
5666 if (zero.IsLinked()) {
5667 __ jmp(&done);
5668 }
5669 break;
5670 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005671
Calin Juravle98893e12015-10-02 21:05:03 +01005672 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005673 case TypeCheckKind::kInterfaceCheck: {
5674 // Note that we indeed only call on the slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005675 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005676 // cases.
5677 //
5678 // We cannot directly call the InstanceofNonTrivial runtime
5679 // entry point without resorting to a type checking slow path
5680 // here (i.e. by calling InvokeRuntime directly), as it would
5681 // require assigning fixed registers for the inputs of this
5682 // HInstanceOf instruction (following the runtime calling
5683 // convention), which might be cluttered by the potential first
5684 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005685 //
5686 // TODO: Introduce a new runtime entry point taking the object
5687 // to test (instead of its class) as argument, and let it deal
5688 // with the read barrier issues. This will let us refactor this
5689 // case of the `switch` code as it was previously (with a direct
5690 // call to the runtime not using a type checking slow path).
5691 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005692 DCHECK(locations->OnlyCallsOnSlowPath());
5693 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5694 /* is_fatal */ false);
5695 codegen_->AddSlowPath(slow_path);
5696 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005697 if (zero.IsLinked()) {
5698 __ jmp(&done);
5699 }
5700 break;
5701 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005702 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005703
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005704 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005705 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005706 __ xorl(out, out);
5707 }
5708
5709 if (done.IsLinked()) {
5710 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005711 }
5712
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005713 if (slow_path != nullptr) {
5714 __ Bind(slow_path->GetExitLabel());
5715 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005716}
5717
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005718void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005719 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5720 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005721 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5722 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005723 case TypeCheckKind::kExactCheck:
5724 case TypeCheckKind::kAbstractClassCheck:
5725 case TypeCheckKind::kClassHierarchyCheck:
5726 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005727 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5728 LocationSummary::kCallOnSlowPath :
5729 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005730 break;
5731 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005732 case TypeCheckKind::kUnresolvedCheck:
5733 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005734 call_kind = LocationSummary::kCallOnSlowPath;
5735 break;
5736 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005737 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5738 locations->SetInAt(0, Location::RequiresRegister());
5739 locations->SetInAt(1, Location::Any());
5740 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5741 locations->AddTemp(Location::RequiresRegister());
5742 // When read barriers are enabled, we need an additional temporary
5743 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005744 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005745 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005746 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005747}
5748
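// For kExactCheck on a possibly null object without read barriers, the code emitted below
// is roughly (a sketch):
//
//   testl obj, obj
//   je    done                          // null passes any checkcast
//   movl  temp, [obj + class_offset]
//   cmpl  temp, cls
//   jne   <TypeCheckSlowPathX86_64>     // throws, or performs a more involved check
//  done: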
5749void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005750 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005751 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005752 Location obj_loc = locations->InAt(0);
5753 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005754 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005755 Location temp_loc = locations->GetTemp(0);
5756 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005757 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005758 locations->GetTemp(1) :
5759 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005760 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5761 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5762 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5763 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005764
Roland Levillain0d5a2812015-11-13 10:07:31 +00005765 bool is_type_check_slow_path_fatal =
5766 (type_check_kind == TypeCheckKind::kExactCheck ||
5767 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5768 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5769 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5770 !instruction->CanThrowIntoCatchBlock();
5771 SlowPathCode* type_check_slow_path =
5772 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5773 is_type_check_slow_path_fatal);
5774 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005775
Roland Levillain0d5a2812015-11-13 10:07:31 +00005776 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005777 case TypeCheckKind::kExactCheck:
5778 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005779 NearLabel done;
5780 // Avoid null check if we know obj is not null.
5781 if (instruction->MustDoNullCheck()) {
5782 __ testl(obj, obj);
5783 __ j(kEqual, &done);
5784 }
5785
5786 // /* HeapReference<Class> */ temp = obj->klass_
5787 GenerateReferenceLoadTwoRegisters(
5788 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5789
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005790 if (cls.IsRegister()) {
5791 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5792 } else {
5793 DCHECK(cls.IsStackSlot()) << cls;
5794 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5795 }
5796 // Jump to slow path for throwing the exception or doing a
5797 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005798 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005799 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005800 break;
5801 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005802
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005803 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005804 NearLabel done;
5805 // Avoid null check if we know obj is not null.
5806 if (instruction->MustDoNullCheck()) {
5807 __ testl(obj, obj);
5808 __ j(kEqual, &done);
5809 }
5810
5811 // /* HeapReference<Class> */ temp = obj->klass_
5812 GenerateReferenceLoadTwoRegisters(
5813 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5814
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005815 // If the class is abstract, we eagerly fetch the super class of the
5816 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005817 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005818 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005819 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005820 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005821
5822 // If the class reference currently in `temp` is not null, jump
5823 // to the `compare_classes` label to compare it with the checked
5824 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005825 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005826 __ j(kNotEqual, &compare_classes);
5827 // Otherwise, jump to the slow path to throw the exception.
5828 //
5829 // But before, move back the object's class into `temp` before
5830 // going into the slow path, as it has been overwritten in the
5831 // meantime.
5832 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005833 GenerateReferenceLoadTwoRegisters(
5834 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005835 __ jmp(type_check_slow_path->GetEntryLabel());
5836
5837 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005838 if (cls.IsRegister()) {
5839 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5840 } else {
5841 DCHECK(cls.IsStackSlot()) << cls;
5842 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5843 }
5844 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005845 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005846 break;
5847 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005848
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005849 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005850 NearLabel done;
5851 // Avoid null check if we know obj is not null.
5852 if (instruction->MustDoNullCheck()) {
5853 __ testl(obj, obj);
5854 __ j(kEqual, &done);
5855 }
5856
5857 // /* HeapReference<Class> */ temp = obj->klass_
5858 GenerateReferenceLoadTwoRegisters(
5859 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5860
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005861 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005862 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005863 __ Bind(&loop);
5864 if (cls.IsRegister()) {
5865 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5866 } else {
5867 DCHECK(cls.IsStackSlot()) << cls;
5868 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5869 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005870 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005871
Roland Levillain0d5a2812015-11-13 10:07:31 +00005872 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005873 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005874
5875 // If the class reference currently in `temp` is not null, jump
5876 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005877 __ testl(temp, temp);
5878 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005879 // Otherwise, jump to the slow path to throw the exception.
5880 //
5881 // But before, move back the object's class into `temp` before
5882 // going into the slow path, as it has been overwritten in the
5883 // meantime.
5884 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005885 GenerateReferenceLoadTwoRegisters(
5886 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005887 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005888 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005889 break;
5890 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005891
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005892 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005893 // We cannot use a NearLabel here, as its range might be too
5894 // short in some cases when read barriers are enabled. This has
5895 // been observed for instance when the code emitted for this
5896 // case uses high x86-64 registers (R8-R15).
5897 Label done;
5898 // Avoid null check if we know obj is not null.
5899 if (instruction->MustDoNullCheck()) {
5900 __ testl(obj, obj);
5901 __ j(kEqual, &done);
5902 }
5903
5904 // /* HeapReference<Class> */ temp = obj->klass_
5905 GenerateReferenceLoadTwoRegisters(
5906 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5907
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005908 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005909 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005910 if (cls.IsRegister()) {
5911 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5912 } else {
5913 DCHECK(cls.IsStackSlot()) << cls;
5914 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5915 }
5916 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005917
5918 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005919 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005920 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005921
5922 // If the component type is not null (i.e. the object is indeed
5923 // an array), jump to label `check_non_primitive_component_type`
5924 // to further check that this component type is not a primitive
5925 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005926 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005927 __ j(kNotEqual, &check_non_primitive_component_type);
5928 // Otherwise, jump to the slow path to throw the exception.
5929 //
5930 // But before, move back the object's class into `temp` before
5931 // going into the slow path, as it has been overwritten in the
5932 // meantime.
5933 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005934 GenerateReferenceLoadTwoRegisters(
5935 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005936 __ jmp(type_check_slow_path->GetEntryLabel());
5937
5938 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005939 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005940 __ j(kEqual, &done);
5941 // Same comment as above regarding `temp` and the slow path.
5942 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005943 GenerateReferenceLoadTwoRegisters(
5944 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005945 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005946 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005947 break;
5948 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005949
Calin Juravle98893e12015-10-02 21:05:03 +01005950 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005951 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005952 NearLabel done;
5953 // Avoid null check if we know obj is not null.
5954 if (instruction->MustDoNullCheck()) {
5955 __ testl(obj, obj);
5956 __ j(kEqual, &done);
5957 }
5958
5959 // /* HeapReference<Class> */ temp = obj->klass_
5960 GenerateReferenceLoadTwoRegisters(
5961 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5962
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005963 // We always go into the type check slow path for the unresolved
5964 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005965 //
5966 // We cannot directly call the CheckCast runtime entry point
5967 // without resorting to a type checking slow path here (i.e. by
5968 // calling InvokeRuntime directly), as it would require to
5969 // assign fixed registers for the inputs of this HInstanceOf
5970 // instruction (following the runtime calling convention), which
5971 // might be cluttered by the potential first read barrier
5972 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005973 //
5974 // TODO: Introduce a new runtime entry point taking the object
5975 // to test (instead of its class) as argument, and let it deal
5976 // with the read barrier issues. This will let us refactor this
5977 // case of the `switch` code as it was previously (with a direct
5978 // call to the runtime not using a type checking slow path).
5979 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005980 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005981 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005982 break;
5983 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005984
Roland Levillain0d5a2812015-11-13 10:07:31 +00005985 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005986}
5987
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005988void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5989 LocationSummary* locations =
5990 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5991 InvokeRuntimeCallingConvention calling_convention;
5992 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5993}
5994
5995void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005996 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
5997 : QUICK_ENTRY_POINT(pUnlockObject),
5998 instruction,
5999 instruction->GetDexPc(),
6000 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006001 if (instruction->IsEnter()) {
6002 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6003 } else {
6004 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6005 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006006}
6007
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006008void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6009void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6010void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6011
6012void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6013 LocationSummary* locations =
6014 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6015 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6016 || instruction->GetResultType() == Primitive::kPrimLong);
6017 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006018 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006019 locations->SetOut(Location::SameAsFirstInput());
6020}
6021
6022void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6023 HandleBitwiseOperation(instruction);
6024}
6025
6026void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6027 HandleBitwiseOperation(instruction);
6028}
6029
6030void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6031 HandleBitwiseOperation(instruction);
6032}
6033
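// In the 64-bit case, the second operand can take several forms; e.g. for an AND the
// instruction emitted below is roughly one of (a sketch):
//
//   andq first_reg, imm32                   // constant fitting in a sign-extended int32
//   andq first_reg, [RIP + literal_int64]   // other constants, via the constant area
//   andq first_reg, [RSP + stack_index]     // double stack slot
//   andq first_reg, second_reg              // register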
6034void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6035 LocationSummary* locations = instruction->GetLocations();
6036 Location first = locations->InAt(0);
6037 Location second = locations->InAt(1);
6038 DCHECK(first.Equals(locations->Out()));
6039
6040 if (instruction->GetResultType() == Primitive::kPrimInt) {
6041 if (second.IsRegister()) {
6042 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006043 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006044 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006045 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006046 } else {
6047 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006048 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006049 }
6050 } else if (second.IsConstant()) {
6051 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6052 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006053 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006054 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006055 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006056 } else {
6057 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006058 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006059 }
6060 } else {
6061 Address address(CpuRegister(RSP), second.GetStackIndex());
6062 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006063 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006064 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006065 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006066 } else {
6067 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006068 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006069 }
6070 }
6071 } else {
6072 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006073 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6074 bool second_is_constant = false;
6075 int64_t value = 0;
6076 if (second.IsConstant()) {
6077 second_is_constant = true;
6078 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006079 }
Mark Mendell40741f32015-04-20 22:10:34 -04006080 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006081
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006082 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006083 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006084 if (is_int32_value) {
6085 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6086 } else {
6087 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6088 }
6089 } else if (second.IsDoubleStackSlot()) {
6090 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006091 } else {
6092 __ andq(first_reg, second.AsRegister<CpuRegister>());
6093 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006094 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006095 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006096 if (is_int32_value) {
6097 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6098 } else {
6099 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6100 }
6101 } else if (second.IsDoubleStackSlot()) {
6102 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006103 } else {
6104 __ orq(first_reg, second.AsRegister<CpuRegister>());
6105 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006106 } else {
6107 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006108 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006109 if (is_int32_value) {
6110 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6111 } else {
6112 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6113 }
6114 } else if (second.IsDoubleStackSlot()) {
6115 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006116 } else {
6117 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6118 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006119 }
6120 }
6121}
6122
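// The two reference-load helpers below differ only in their source: the one-register
// variant loads through `out` itself (clobbering it, which is why the non-Baker read
// barrier path first saves it into `maybe_temp`), while the two-register variant loads
// from a separate `obj` register into `out`.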
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006123void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6124 Location out,
6125 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006126 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006127 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6128 if (kEmitCompilerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006129 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006130 if (kUseBakerReadBarrier) {
6131 // Load with fast path based Baker's read barrier.
6132 // /* HeapReference<Object> */ out = *(out + offset)
6133 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006134 instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006135 } else {
6136 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006137 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006138 // in the following move operation, as we will need it for the
6139 // read barrier below.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006140 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006141 // /* HeapReference<Object> */ out = *(out + offset)
6142 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006143 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006144 }
6145 } else {
6146 // Plain load with no read barrier.
6147 // /* HeapReference<Object> */ out = *(out + offset)
6148 __ movl(out_reg, Address(out_reg, offset));
6149 __ MaybeUnpoisonHeapReference(out_reg);
6150 }
6151}
6152
6153void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6154 Location out,
6155 Location obj,
6156 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006157 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006158 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6159 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6160 if (kEmitCompilerReadBarrier) {
6161 if (kUseBakerReadBarrier) {
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006162 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006163 // Load with fast path based Baker's read barrier.
6164 // /* HeapReference<Object> */ out = *(obj + offset)
6165 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006166 instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006167 } else {
6168 // Load with slow path based read barrier.
6169 // /* HeapReference<Object> */ out = *(obj + offset)
6170 __ movl(out_reg, Address(obj_reg, offset));
6171 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6172 }
6173 } else {
6174 // Plain load with no read barrier.
6175 // /* HeapReference<Object> */ out = *(obj + offset)
6176 __ movl(out_reg, Address(obj_reg, offset));
6177 __ MaybeUnpoisonHeapReference(out_reg);
6178 }
6179}
6180
6181void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6182 Location root,
6183 CpuRegister obj,
6184 uint32_t offset) {
6185 CpuRegister root_reg = root.AsRegister<CpuRegister>();
6186 if (kEmitCompilerReadBarrier) {
6187 if (kUseBakerReadBarrier) {
6188 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6189 // Baker's read barriers are used:
6190 //
6191 // root = obj.field;
6192 // if (Thread::Current()->GetIsGcMarking()) {
6193 // root = ReadBarrier::Mark(root)
6194 // }
6195
6196 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6197 __ movl(root_reg, Address(obj, offset));
6198 static_assert(
6199 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6200 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6201 "have different sizes.");
6202 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6203 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6204 "have different sizes.");
6205
6206 // Slow path used to mark the GC root `root`.
6207 SlowPathCode* slow_path =
6208 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root, root);
6209 codegen_->AddSlowPath(slow_path);
6210
6211 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
6212 /* no_rip */ true),
6213 Immediate(0));
6214 __ j(kNotEqual, slow_path->GetEntryLabel());
6215 __ Bind(slow_path->GetExitLabel());
6216 } else {
6217 // GC root loaded through a slow path for read barriers other
6218 // than Baker's.
6219 // /* GcRoot<mirror::Object>* */ root = obj + offset
6220 __ leaq(root_reg, Address(obj, offset));
6221 // /* mirror::Object* */ root = root->Read()
6222 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6223 }
6224 } else {
6225 // Plain GC root load with no read barrier.
6226 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6227 __ movl(root_reg, Address(obj, offset));
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006228 // Note that GC roots are not affected by heap poisoning, thus we
6229 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006230 }
6231}
6232
6233void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6234 Location ref,
6235 CpuRegister obj,
6236 uint32_t offset,
6237 Location temp,
6238 bool needs_null_check) {
6239 DCHECK(kEmitCompilerReadBarrier);
6240 DCHECK(kUseBakerReadBarrier);
6241
6242 // /* HeapReference<Object> */ ref = *(obj + offset)
6243 Address src(obj, offset);
6244 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6245}
6246
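// For array loads the source address is obj + data_offset + index * 4: heap references are
// compressed 32-bit values, hence the TIMES_4 scale factor below.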
6247void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6248 Location ref,
6249 CpuRegister obj,
6250 uint32_t data_offset,
6251 Location index,
6252 Location temp,
6253 bool needs_null_check) {
6254 DCHECK(kEmitCompilerReadBarrier);
6255 DCHECK(kUseBakerReadBarrier);
6256
6257 // /* HeapReference<Object> */ ref =
6258 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6259 Address src = index.IsConstant() ?
6260 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
6261 Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
6262 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
6263}
6264
6265void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6266 Location ref,
6267 CpuRegister obj,
6268 const Address& src,
6269 Location temp,
6270 bool needs_null_check) {
6271 DCHECK(kEmitCompilerReadBarrier);
6272 DCHECK(kUseBakerReadBarrier);
6273
6274 // In slow path based read barriers, the read barrier call is
6275 // inserted after the original load. However, in fast path based
6276 // Baker's read barriers, we need to perform the load of
6277 // mirror::Object::monitor_ *before* the original reference load.
6278 // This load-load ordering is required by the read barrier.
6279 // The fast path/slow path (for Baker's algorithm) should look like:
6280 //
6281 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6282 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6283 // HeapReference<Object> ref = *src; // Original reference load.
6284 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
6285 // if (is_gray) {
6286 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6287 // }
6288 //
6289 // Note: the original implementation in ReadBarrier::Barrier is
6290 // slightly more complex as:
6291 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006292 // the high bits of rb_state, which are expected to be all zeroes
6293 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
6294 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006295 // - it performs additional checks that we do not do here for
6296 // performance reasons.
6297
6298 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
6299 CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
6300 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6301
6302 // /* int32_t */ monitor = obj->monitor_
6303 __ movl(temp_reg, Address(obj, monitor_offset));
6304 if (needs_null_check) {
6305 MaybeRecordImplicitNullCheck(instruction);
6306 }
6307 // /* LockWord */ lock_word = LockWord(monitor)
6308 static_assert(sizeof(LockWord) == sizeof(int32_t),
6309 "art::LockWord and int32_t have different sizes.");
6310 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
6311 __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
6312 __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
6313 static_assert(
6314 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
6315 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
6316
6317 // Load fence to prevent load-load reordering.
6318 // Note that this is a no-op, thanks to the x86-64 memory model.
6319 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6320
6321 // The actual reference load.
6322 // /* HeapReference<Object> */ ref = *src
6323 __ movl(ref_reg, src);
6324
6325 // Object* ref = ref_addr->AsMirrorPtr()
6326 __ MaybeUnpoisonHeapReference(ref_reg);
6327
6328 // Slow path used to mark the object `ref` when it is gray.
6329 SlowPathCode* slow_path =
6330 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref, ref);
6331 AddSlowPath(slow_path);
6332
6333 // if (rb_state == ReadBarrier::gray_ptr_)
6334 // ref = ReadBarrier::Mark(ref);
6335 __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
6336 __ j(kEqual, slow_path->GetEntryLabel());
6337 __ Bind(slow_path->GetExitLabel());
6338}

void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
  }
}

void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

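// For small switches a chain of compare/jumps is emitted; larger ones use a
// RIP-relative jump table in the constant area (see below). As an illustration
// only (block labels are hypothetical), a zero-based switch with three entries
// lowers to roughly:
//   cmpl value, 1 ; jb case_0 ; je case_1
//   cmpl value, 2 ; je case_2
//   jmp default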
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

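  // Otherwise, emit an indirect jump through a table of 32-bit offsets stored
  // in the constant area. The emitted sequence is roughly (register names are
  // illustrative only):
  //   leal   temp, [value - lower_bound]   (only if lower_bound != 0)
  //   cmpl   temp, num_entries - 1
  //   ja     default
  //   leaq   base, [rip + jump_table]
  //   movsxd temp, dword ptr [base + temp * 4]
  //   addq   temp, base
  //   jmp    temp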
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}

void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ xorl(dest, dest);
  } else {
    __ movl(dest, Immediate(value));
  }
}

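// Loads a 64-bit constant, preferring the shortest encoding. Illustrative
// examples (assumed, not exhaustive): 0 becomes xorl (which also clears the
// upper 32 bits), a positive value that fits in 32 bits becomes a
// zero-extending movl, and anything else falls back to a full 64-bit movq.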
void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
  if (value == 0) {
    // Clears upper bits too.
    __ xorl(dest, dest);
  } else if (value > 0 && IsInt<32>(value)) {
    // We can use a 32 bit move, as it will zero-extend and is one byte shorter.
    __ movl(dest, Immediate(static_cast<int32_t>(value)));
  } else {
    __ movq(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
  if (value == 0) {
    __ xorps(dest, dest);
  } else {
    __ movss(dest, LiteralInt32Address(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
  if (value == 0) {
    __ xorpd(dest, dest);
  } else {
    __ movsd(dest, LiteralInt64Address(value));
  }
}

void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
  Load32BitValue(dest, bit_cast<int32_t, float>(value));
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
  Load64BitValue(dest, bit_cast<int64_t, double>(value));
}

void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ testl(dest, dest);
  } else {
    __ cmpl(dest, Immediate(value));
  }
}

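// Compares a register against a 64-bit constant. x86-64 has no cmpq form with
// a 64-bit immediate, so values that do not fit in a sign-extended 32-bit
// immediate are compared against a literal placed in the RIP-relative constant
// area instead.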
void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
  if (IsInt<32>(value)) {
    if (value == 0) {
      __ testq(dest, dest);
    } else {
      __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
    }
  } else {
    // The value does not fit in a sign-extended 32-bit immediate.
    __ cmpq(dest, LiteralInt64Address(value));
  }
}

void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
  DCHECK(dest.IsDoubleStackSlot());
  if (IsInt<32>(value)) {
    // Can move directly as an int32 constant.
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
            Immediate(static_cast<int32_t>(value)));
  } else {
    Load64BitValue(CpuRegister(TMP), value);
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
  }
}

/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;
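    // Worked example (numbers illustrative only): if the constant area starts
    // at code offset 0x2000, this literal sits 0x10 bytes into it, and the
    // next instruction begins at 0x1800, the stored displacement is
    // 0x2010 - 0x1800 = 0x810, i.e. 0x810 bytes forward from RIP.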

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};

/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
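    // Each table entry below is stored relative to this offset. Illustrative
    // example (numbers assumed): with CodeSize() == 0x2000 and the table 0x10
    // bytes into the constant area, a target block bound at offset 0x1840 is
    // stored as 0x1840 - 0x2010 = -0x7D0.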

    // Populate the jump table with the offsets of the case targets.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};

void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

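// The helpers below record a literal in the constant area and return a
// RIP-relative address whose displacement is patched by RIPFixup once the
// final code layout is known. A typical (illustrative) use is
// `__ movsd(dest, LiteralInt64Address(value))`, as in Load64BitValue above.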
Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
  return Address::RIP(fixup);
}

// TODO: trg as memory.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetArena());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}

Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
  // Create a fixup to be used to create and address the jump table.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);

  // We have to populate the jump tables.
  fixups_to_jump_tables_.push_back(table_fixup);
  return Address::RIP(table_fixup);
}

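// Stores a 64-bit immediate to memory, either as a single movq (when the value
// fits in a sign-extended 32-bit immediate) or as two 32-bit stores. Only the
// first store records the implicit null check: if the object is null, that
// first access is the one that faults.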
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // The value does not fit in a 32-bit immediate. Store it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}

#undef __

}  // namespace x86_64
}  // namespace art