/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86_64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86_64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data when num_entries is small.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
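// E.g. at the threshold of 5 entries, the compare/jump sequence costs roughly 1.5 * 5 ~ 8
// instructions while the jump table costs 7 instructions plus 5 table entries, so anything
// smaller stays cheaper as a plain compare/jump sequence.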

static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

static constexpr int kC2ConditionMask = 0x400;
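// 0x400 is bit 10 (C2) of the x87 FPU status word; fprem/fprem1 leave it set while the
// partial-remainder reduction is still incomplete, which is what this mask is used to test.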

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, x).Int32Value()
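// QUICK_ENTRY_POINT resolves a quick entrypoint to its byte offset inside the Thread object;
// InvokeRuntime (further below) turns that offset into a gs:-relative call, GS pointing at the
// current Thread on x86-64.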

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowNullPointer),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowDivZero),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};

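// Slow path for a division or remainder by -1: `idiv` raises a #DE fault for kMinValue / -1,
// so the correct Java results (the negated dividend for the quotient, 0 for the remainder)
// are produced here without issuing the division.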
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;
  const Primitive::Type type_;
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pTestSuspend),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ?
                                      QUICK_ENTRY_POINT(pInitializeStaticStorage) :
                                      QUICK_ENTRY_POINT(pInitializeType),
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};

class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pResolveString),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};

class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, uint32_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
                                    instruction_,
                                    dex_pc,
                                    this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};

class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};

class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj)
      : SlowPathCode(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // Save live registers before the runtime call, and in particular
    // RDI and/or RAX (if they are live), as they are clobbered by
    // functions art_quick_read_barrier_mark_regX.
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI and output in RAX):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64WordSize>(reg);
    // TODO: Do not emit a stack map for this runtime call.
    x86_64_codegen->InvokeRuntime(entry_point_offset,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<X86_64Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps FP condition to x86_64 name.
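// Note: ucomiss/ucomisd report a floating-point compare through CF/ZF the way an unsigned
// integer compare would, so this mapping deliberately uses the below/above condition codes
// instead of less/greater.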
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    MethodReference target_method ATTRIBUTE_UNUSED) {
  switch (desired_dispatch_info.code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
      return HInvokeStaticOrDirect::DispatchInfo {
        desired_dispatch_info.method_load_kind,
        HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
        desired_dispatch_info.method_load_data,
        0u
      };
    default:
      return desired_dispatch_info;
  }
}

Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}

void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64WordSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}

887 CpuRegister temp = temp_in.AsRegister<CpuRegister>();
888 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
889 invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffraye5234232015-12-02 09:06:11 +0000890
891 // Use the calling convention instead of the location of the receiver, as
892 // intrinsics may have put the receiver in a different register. In the intrinsics
893 // slow path, the arguments have been moved to the right place, so here we are
894 // guaranteed that the receiver is the first register of the calling convention.
895 InvokeDexCallingConvention calling_convention;
896 Register receiver = calling_convention.GetRegisterAt(0);
897
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000898 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000899 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +0000900 __ movl(temp, Address(CpuRegister(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000901 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000902 // Instead of simply (possibly) unpoisoning `temp` here, we should
903 // emit a read barrier for the previous class reference load.
904 // However this is not required in practice, as this is an
905 // intermediate/temporary reference and because the current
906 // concurrent copying collector keeps the from-space memory
907 // intact/accessible until the end of the marking phase (the
908 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +0000909 __ MaybeUnpoisonHeapReference(temp);
910 // temp = temp->GetMethodAt(method_offset);
911 __ movq(temp, Address(temp, method_offset));
912 // call temp->GetEntryPoint();
913 __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
914 kX86_64WordSize).SizeValue()));
915}
916
void CodeGeneratorX86_64::RecordSimplePatch() {
  if (GetCompilerOptions().GetIncludePatchInformation()) {
    simple_patches_.emplace_back();
    __ Bind(&simple_patches_.back());
  }
}

void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
  __ Bind(&string_patches_.back().label);
}

void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
  type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
  __ Bind(&type_patches_.back().label);
}

Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
                                                            uint32_t element_offset) {
  // Add a patch entry and return the label.
  pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
  return &pc_relative_dex_cache_patches_.back().label;
}

void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}

Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100995void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100996 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100997}
998
999void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001000 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001001}
1002
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001003size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1004 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1005 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001006}
1007
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001008size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1009 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1010 return kX86_64WordSize;
1011}
1012
1013size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1014 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1015 return kX86_64WordSize;
1016}
1017
1018size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1019 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1020 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001021}
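// All four spill helpers address the slot directly at RSP + stack_index and report
// kX86_64WordSize (8 bytes) back to the register allocator. Core registers use movq;
// XMM registers use movsd, which suggests only the low 64 bits of a callee-saved XMM
// register are preserved across the spill/restore pair.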
1022
Calin Juravle175dc732015-08-25 15:42:32 +01001023void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1024 HInstruction* instruction,
1025 uint32_t dex_pc,
1026 SlowPathCode* slow_path) {
1027 InvokeRuntime(GetThreadOffset<kX86_64WordSize>(entrypoint).Int32Value(),
1028 instruction,
1029 dex_pc,
1030 slow_path);
1031}
1032
1033void CodeGeneratorX86_64::InvokeRuntime(int32_t entry_point_offset,
Alexandre Rames8158f282015-08-07 10:26:17 +01001034 HInstruction* instruction,
1035 uint32_t dex_pc,
1036 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001037 ValidateInvokeRuntime(instruction, slow_path);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001038 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
Alexandre Rames8158f282015-08-07 10:26:17 +01001039 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames8158f282015-08-07 10:26:17 +01001040}
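// On x86-64 the Thread object is reachable through the GS segment, so the call above
// amounts to something like "call gs:[entry_point_offset]", where the offset indexes the
// thread-local QuickEntryPoints table (pAllocObject, pTestSuspend, ...). RecordPcInfo then
// maps the return address to a stack map so the runtime can unwind through the call.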
1041
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001042static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001043// Use a fake return address register to mimic Quick.
1044static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001045CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001046 const X86_64InstructionSetFeatures& isa_features,
1047 const CompilerOptions& compiler_options,
1048 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001049 : CodeGenerator(graph,
1050 kNumberOfCpuRegisters,
1051 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001052 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001053 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1054 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001055 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001056 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1057 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001058 compiler_options,
1059 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001060 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001061 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001062 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001063 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001064 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001065 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001066 constant_area_start_(0),
Vladimir Marko5233f932015-09-29 19:01:15 +01001067 method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1068 relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001069 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001070 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1071 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001072 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Mark Mendell9c86b482015-09-18 13:36:07 -04001073 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001074 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1075}
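// Treating the fake return-address register as allocated (and as a core callee save, see
// the register mask above) presumably makes the 8 bytes pushed by the caller's call count
// toward the core spill area, so frame size and CFI match the layout Quick used.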
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001076
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001077InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1078 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001079 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001080 assembler_(codegen->GetAssembler()),
1081 codegen_(codegen) {}
1082
David Brazdil58282f42016-01-14 12:45:10 +00001083void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001084 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001085 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001086
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001087 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001088 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001089}
1090
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001091static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001092 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001093}
David Srbecky9d8606d2015-04-12 09:35:32 +01001094
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001095static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001096 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001097}
1098
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001099void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001100 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001101 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001102 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001103 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001104 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001105
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001106 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001107 __ testq(CpuRegister(RAX), Address(
1108 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001109 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001110 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001111
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001112 if (HasEmptyFrame()) {
1113 return;
1114 }
1115
Nicolas Geoffray98893962015-01-21 12:32:32 +00001116 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001117 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001118 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001119 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001120 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1121 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001122 }
1123 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001124
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001125 int adjust = GetFrameSize() - GetCoreSpillSize();
1126 __ subq(CpuRegister(RSP), Immediate(adjust));
1127 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001128 uint32_t xmm_spill_location = GetFpuSpillStart();
1129 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001130
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001131 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1132 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001133 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1134 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1135 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001136 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001137 }
1138
Mathieu Chartiere401d142015-04-22 13:56:20 -07001139 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01001140 CpuRegister(kMethodRegisterArgument));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141}
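// For illustration only (register choices hypothetical), a non-leaf method that allocated
// RBX and XMM12 with a 64-byte frame would get a prologue roughly like:
//   test rax, [rsp - <stack overflow reserved bytes>]   // implicit overflow probe
//   push rbx                                            // core callee saves, last first
//   sub  rsp, <frame size - core spill size>
//   movsd [rsp + <fpu spill start>], xmm12
//   mov  [rsp], rdi                                     // spill the ArtMethod*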
1142
1143void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001144 __ cfi().RememberState();
1145 if (!HasEmptyFrame()) {
1146 uint32_t xmm_spill_location = GetFpuSpillStart();
1147 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1148 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1149 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1150 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1151 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1152 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1153 }
1154 }
1155
1156 int adjust = GetFrameSize() - GetCoreSpillSize();
1157 __ addq(CpuRegister(RSP), Immediate(adjust));
1158 __ cfi().AdjustCFAOffset(-adjust);
1159
1160 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1161 Register reg = kCoreCalleeSaves[i];
1162 if (allocated_registers_.ContainsCoreRegister(reg)) {
1163 __ popq(CpuRegister(reg));
1164 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1165 __ cfi().Restore(DWARFReg(reg));
1166 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001167 }
1168 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001169 __ ret();
1170 __ cfi().RestoreState();
1171 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001172}
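// The epilogue mirrors the prologue in reverse (reload XMM spills, pop core registers,
// ret). RememberState/RestoreState bracket it so that CFI after the ret reverts to the
// in-frame state, which keeps unwind info correct for any code emitted after this return.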
1173
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001174void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1175 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001176}
1177
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001178void CodeGeneratorX86_64::Move(Location destination, Location source) {
1179 if (source.Equals(destination)) {
1180 return;
1181 }
1182 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001183 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001184 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001185 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001186 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001187 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001188 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001189 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1190 } else if (source.IsConstant()) {
1191 HConstant* constant = source.GetConstant();
1192 if (constant->IsLongConstant()) {
1193 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1194 } else {
1195 Load32BitValue(dest, GetInt32ValueOf(constant));
1196 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001197 } else {
1198 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001199 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001200 }
1201 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001202 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001203 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001204 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001205 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001206 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1207 } else if (source.IsConstant()) {
1208 HConstant* constant = source.GetConstant();
1209 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1210 if (constant->IsFloatConstant()) {
1211 Load32BitValue(dest, static_cast<int32_t>(value));
1212 } else {
1213 Load64BitValue(dest, value);
1214 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001215 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001216 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001217 } else {
1218 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001219 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001220 }
1221 } else if (destination.IsStackSlot()) {
1222 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001223 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001224 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001225 } else if (source.IsFpuRegister()) {
1226 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001227 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001228 } else if (source.IsConstant()) {
1229 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001230 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001231 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001232 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001233 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001234 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1235 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001236 }
1237 } else {
1238 DCHECK(destination.IsDoubleStackSlot());
1239 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001240 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001241 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001242 } else if (source.IsFpuRegister()) {
1243 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001244 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001245 } else if (source.IsConstant()) {
1246 HConstant* constant = source.GetConstant();
Zheng Xu12bca972015-03-30 19:35:50 +08001247 int64_t value;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001248 if (constant->IsDoubleConstant()) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001249 value = bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001250 } else {
1251 DCHECK(constant->IsLongConstant());
1252 value = constant->AsLongConstant()->GetValue();
1253 }
Mark Mendellcfa410b2015-05-25 16:02:44 -04001254 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001255 } else {
1256 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001257 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1258 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001259 }
1260 }
1261}
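// Stack-to-stack moves have no single-instruction form on x86-64, so they bounce through
// the reserved scratch register TMP (blocked from allocation in SetupBlockedRegisters),
// e.g. roughly:
//   movl TMP, [rsp + src_offset]
//   movl [rsp + dst_offset], TMP
// GPR<->XMM transfers use movd/movq directly rather than going through memory.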
1262
Calin Juravle175dc732015-08-25 15:42:32 +01001263void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1264 DCHECK(location.IsRegister());
1265 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1266}
1267
Calin Juravlee460d1d2015-09-29 04:52:17 +01001268void CodeGeneratorX86_64::MoveLocation(
1269 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1270 Move(dst, src);
1271}
1272
1273void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1274 if (location.IsRegister()) {
1275 locations->AddTemp(location);
1276 } else {
1277 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1278 }
1279}
1280
David Brazdilfc6a86a2015-06-26 10:33:45 +00001281void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001282 DCHECK(!successor->IsExitBlock());
1283
1284 HBasicBlock* block = got->GetBlock();
1285 HInstruction* previous = got->GetPrevious();
1286
1287 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001288 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001289 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1290 return;
1291 }
1292
1293 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1294 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1295 }
1296 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001297 __ jmp(codegen_->GetLabelOf(successor));
1298 }
1299}
1300
David Brazdilfc6a86a2015-06-26 10:33:45 +00001301void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1302 got->SetLocations(nullptr);
1303}
1304
1305void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1306 HandleGoto(got, got->GetSuccessor());
1307}
1308
1309void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1310 try_boundary->SetLocations(nullptr);
1311}
1312
1313void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1314 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1315 if (!successor->IsExitBlock()) {
1316 HandleGoto(try_boundary, successor);
1317 }
1318}
1319
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001320void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1321 exit->SetLocations(nullptr);
1322}
1323
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001324void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001325}
1326
Mark Mendell152408f2015-12-31 12:28:50 -05001327template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001328void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001329 LabelType* true_label,
1330 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001331 if (cond->IsFPConditionTrueIfNaN()) {
1332 __ j(kUnordered, true_label);
1333 } else if (cond->IsFPConditionFalseIfNaN()) {
1334 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001335 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001336 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001337}
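// ucomiss/ucomisd report NaN operands as "unordered" (PF set), so the NaN outcome must be
// routed before testing the ordinary relation. A hypothetical "a < b" whose compare has
// gt bias (NaN => false) would come out roughly as:
//   jp false_label   // unordered, i.e. at least one NaN
//   jb true_label    // below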
1338
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001339void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001340 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001341
Mark Mendellc4701932015-04-10 13:18:51 -04001342 Location left = locations->InAt(0);
1343 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001344 Primitive::Type type = condition->InputAt(0)->GetType();
1345 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001346 case Primitive::kPrimBoolean:
1347 case Primitive::kPrimByte:
1348 case Primitive::kPrimChar:
1349 case Primitive::kPrimShort:
1350 case Primitive::kPrimInt:
1351 case Primitive::kPrimNot: {
1352 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1353 if (right.IsConstant()) {
1354 int32_t value = CodeGenerator::GetInt32ValueOf(right.GetConstant());
1355 if (value == 0) {
1356 __ testl(left_reg, left_reg);
1357 } else {
1358 __ cmpl(left_reg, Immediate(value));
1359 }
1360 } else if (right.IsStackSlot()) {
1361 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1362 } else {
1363 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1364 }
1365 break;
1366 }
Mark Mendellc4701932015-04-10 13:18:51 -04001367 case Primitive::kPrimLong: {
1368 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1369 if (right.IsConstant()) {
1370 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001371 codegen_->Compare64BitValue(left_reg, value);
Mark Mendellc4701932015-04-10 13:18:51 -04001372 } else if (right.IsDoubleStackSlot()) {
1373 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1374 } else {
1375 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1376 }
Mark Mendellc4701932015-04-10 13:18:51 -04001377 break;
1378 }
1379 case Primitive::kPrimFloat: {
1380 if (right.IsFpuRegister()) {
1381 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1382 } else if (right.IsConstant()) {
1383 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1384 codegen_->LiteralFloatAddress(
1385 right.GetConstant()->AsFloatConstant()->GetValue()));
1386 } else {
1387 DCHECK(right.IsStackSlot());
1388 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1389 Address(CpuRegister(RSP), right.GetStackIndex()));
1390 }
Mark Mendellc4701932015-04-10 13:18:51 -04001391 break;
1392 }
1393 case Primitive::kPrimDouble: {
1394 if (right.IsFpuRegister()) {
1395 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1396 } else if (right.IsConstant()) {
1397 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1398 codegen_->LiteralDoubleAddress(
1399 right.GetConstant()->AsDoubleConstant()->GetValue()));
1400 } else {
1401 DCHECK(right.IsDoubleStackSlot());
1402 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1403 Address(CpuRegister(RSP), right.GetStackIndex()));
1404 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001405 break;
1406 }
1407 default:
1408 LOG(FATAL) << "Unexpected condition type " << type;
1409 }
1410}
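// Comparisons against the constant 0 are strength-reduced to testl on the register itself,
// which sets the same flags with a shorter encoding. FP constants are compared through the
// RIP-relative literal pool (LiteralFloatAddress / LiteralDoubleAddress) instead of being
// materialized into a register first.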
1411
1412template<class LabelType>
1413void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1414 LabelType* true_target_in,
1415 LabelType* false_target_in) {
1416 // Generated branching requires both targets to be explicit. If either of the
1417 // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
1418 LabelType fallthrough_target;
1419 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1420 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1421
1422 // Generate the comparison to set the CC.
1423 GenerateCompareTest(condition);
1424
1425 // Now generate the correct jump(s).
1426 Primitive::Type type = condition->InputAt(0)->GetType();
1427 switch (type) {
1428 case Primitive::kPrimLong: {
1429 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1430 break;
1431 }
1432 case Primitive::kPrimFloat: {
1433 GenerateFPJumps(condition, true_target, false_target);
1434 break;
1435 }
1436 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001437 GenerateFPJumps(condition, true_target, false_target);
1438 break;
1439 }
1440 default:
1441 LOG(FATAL) << "Unexpected condition type " << type;
1442 }
1443
David Brazdil0debae72015-11-12 18:37:00 +00001444 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001445 __ jmp(false_target);
1446 }
David Brazdil0debae72015-11-12 18:37:00 +00001447
1448 if (fallthrough_target.IsLinked()) {
1449 __ Bind(&fallthrough_target);
1450 }
Mark Mendellc4701932015-04-10 13:18:51 -04001451}
1452
David Brazdil0debae72015-11-12 18:37:00 +00001453static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1454 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1455 // are set only strictly before `branch`. We can't use the eflags on FP
1456 // conditions if they are materialized, due to the complex branching they require.
1457 return cond->IsCondition() &&
1458 cond->GetNext() == branch &&
1459 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1460}
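// Typical sequence this enables (registers hypothetical): when the materialized condition
// immediately precedes its user, the flags from its compare are still live, so the user
// can branch on them directly instead of re-testing the boolean:
//   xorl eax, eax      // clear output first; xor clobbers EFLAGS
//   cmpl edi, esi
//   setl al            // materialization
//   jl   true_target   // HIf right after: reuse the flags, no "testl al, al" needed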
1461
Mark Mendell152408f2015-12-31 12:28:50 -05001462template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001463void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001464 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001465 LabelType* true_target,
1466 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001467 HInstruction* cond = instruction->InputAt(condition_input_index);
1468
1469 if (true_target == nullptr && false_target == nullptr) {
1470 // Nothing to do. The code always falls through.
1471 return;
1472 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001473 // Constant condition, statically compared against "true" (integer value 1).
1474 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001475 if (true_target != nullptr) {
1476 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001477 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001478 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001479 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001480 if (false_target != nullptr) {
1481 __ jmp(false_target);
1482 }
1483 }
1484 return;
1485 }
1486
1487 // The following code generates these patterns:
1488 // (1) true_target == nullptr && false_target != nullptr
1489 // - opposite condition true => branch to false_target
1490 // (2) true_target != nullptr && false_target == nullptr
1491 // - condition true => branch to true_target
1492 // (3) true_target != nullptr && false_target != nullptr
1493 // - condition true => branch to true_target
1494 // - branch to false_target
1495 if (IsBooleanValueOrMaterializedCondition(cond)) {
1496 if (AreEflagsSetFrom(cond, instruction)) {
1497 if (true_target == nullptr) {
1498 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1499 } else {
1500 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1501 }
1502 } else {
1503 // Materialized condition, compare against 0.
1504 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1505 if (lhs.IsRegister()) {
1506 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1507 } else {
1508 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1509 }
1510 if (true_target == nullptr) {
1511 __ j(kEqual, false_target);
1512 } else {
1513 __ j(kNotEqual, true_target);
1514 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001515 }
1516 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001517 // Condition has not been materialized, use its inputs as the
1518 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001519 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001520
David Brazdil0debae72015-11-12 18:37:00 +00001521 // If this is a long or FP comparison that has been folded into
1522 // the HCondition, generate the comparison directly.
1523 Primitive::Type type = condition->InputAt(0)->GetType();
1524 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1525 GenerateCompareTestAndBranch(condition, true_target, false_target);
1526 return;
1527 }
1528
1529 Location lhs = condition->GetLocations()->InAt(0);
1530 Location rhs = condition->GetLocations()->InAt(1);
1531 if (rhs.IsRegister()) {
1532 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1533 } else if (rhs.IsConstant()) {
1534 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001535 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001536 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001537 __ cmpl(lhs.AsRegister<CpuRegister>(),
1538 Address(CpuRegister(RSP), rhs.GetStackIndex()));
1539 }
1540 if (true_target == nullptr) {
1541 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1542 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001543 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001544 }
Dave Allison20dfc792014-06-16 20:44:29 -07001545 }
David Brazdil0debae72015-11-12 18:37:00 +00001546
1547 // If neither branch falls through (case 3), the conditional branch to `true_target`
1548 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1549 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001550 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001551 }
1552}
1553
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001554void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001555 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1556 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001557 locations->SetInAt(0, Location::Any());
1558 }
1559}
1560
1561void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001562 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1563 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1564 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1565 nullptr : codegen_->GetLabelOf(true_successor);
1566 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1567 nullptr : codegen_->GetLabelOf(false_successor);
1568 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001569}
1570
1571void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1572 LocationSummary* locations = new (GetGraph()->GetArena())
1573 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00001574 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001575 locations->SetInAt(0, Location::Any());
1576 }
1577}
1578
1579void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001580 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001581 GenerateTestAndBranch<Label>(deoptimize,
1582 /* condition_input_index */ 0,
1583 slow_path->GetEntryLabel(),
1584 /* false_target */ nullptr);
1585}
1586
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001587static bool SelectCanUseCMOV(HSelect* select) {
1588 // There are no conditional move instructions for XMMs.
1589 if (Primitive::IsFloatingPointType(select->GetType())) {
1590 return false;
1591 }
1592
1593 // An FP condition doesn't generate the single CC that we need.
1594 HInstruction* condition = select->GetCondition();
1595 if (condition->IsCondition() &&
1596 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1597 return false;
1598 }
1599
1600 // We can generate a CMOV for this Select.
1601 return true;
1602}
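// In short: CMOV is only used when both the select's values and its condition are integral.
// cmovcc has no XMM destination form, and an FP condition may need two flag tests (parity
// for unordered plus the relation), which a single cmovcc cannot express.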
1603
David Brazdil74eb1b22015-12-14 11:44:01 +00001604void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1605 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1606 if (Primitive::IsFloatingPointType(select->GetType())) {
1607 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001608 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001609 } else {
1610 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001611 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001612 if (select->InputAt(1)->IsConstant()) {
1613 locations->SetInAt(1, Location::RequiresRegister());
1614 } else {
1615 locations->SetInAt(1, Location::Any());
1616 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001617 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001618 locations->SetInAt(1, Location::Any());
1619 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001620 }
1621 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1622 locations->SetInAt(2, Location::RequiresRegister());
1623 }
1624 locations->SetOut(Location::SameAsFirstInput());
1625}
1626
1627void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
1628 LocationSummary* locations = select->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001629 if (SelectCanUseCMOV(select)) {
1630 // If both the condition and the source types are integer, we can generate
1631 // a CMOV to implement Select.
1632 CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001633 Location value_true_loc = locations->InAt(1);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001634 DCHECK(locations->InAt(0).Equals(locations->Out()));
1635
1636 HInstruction* select_condition = select->GetCondition();
1637 Condition cond = kNotEqual;
1638
1639 // Figure out how to test the 'condition'.
1640 if (select_condition->IsCondition()) {
1641 HCondition* condition = select_condition->AsCondition();
1642 if (!condition->IsEmittedAtUseSite()) {
1643 // This was a previously materialized condition.
1644 // Can we use the existing condition code?
1645 if (AreEflagsSetFrom(condition, select)) {
1646 // Materialization was the previous instruction. Condition codes are right.
1647 cond = X86_64IntegerCondition(condition->GetCondition());
1648 } else {
1649 // No, we have to recreate the condition code.
1650 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1651 __ testl(cond_reg, cond_reg);
1652 }
1653 } else {
1654 GenerateCompareTest(condition);
1655 cond = X86_64IntegerCondition(condition->GetCondition());
1656 }
1657 } else {
1658 // Must be a boolean condition, which needs to be compared to 0.
1659 CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
1660 __ testl(cond_reg, cond_reg);
1661 }
1662
1663 // If the condition is true, overwrite the output, which already contains false.
1664 // Generate the correct sized CMOV.
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001665 bool is_64_bit = Primitive::Is64BitType(select->GetType());
1666 if (value_true_loc.IsRegister()) {
1667 __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
1668 } else {
1669 __ cmov(cond,
1670 value_false,
1671 Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
1672 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001673 } else {
1674 NearLabel false_target;
1675 GenerateTestAndBranch<NearLabel>(select,
1676 /* condition_input_index */ 2,
1677 /* true_target */ nullptr,
1678 &false_target);
1679 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1680 __ Bind(&false_target);
1681 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001682}
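// With a materialized boolean condition and both values in registers, the CMOV path comes
// out roughly as (registers hypothetical):
//   testl ecx, ecx       // condition != 0 ?
//   cmovne rax, rdx      // rax holds the false value (and the output), rdx the true value
// i.e. the output starts as the false input and is conditionally overwritten, which is why
// the locations force InAt(0) == Out.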
1683
David Srbecky0cf44932015-12-09 14:09:59 +00001684void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1685 new (GetGraph()->GetArena()) LocationSummary(info);
1686}
1687
David Srbeckyd28f4a02016-03-14 17:14:24 +00001688void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1689 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001690}
1691
1692void CodeGeneratorX86_64::GenerateNop() {
1693 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001694}
1695
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001696void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001697 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001698 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001699 // Handle the long/FP comparisons made in instruction simplification.
1700 switch (cond->InputAt(0)->GetType()) {
1701 case Primitive::kPrimLong:
1702 locations->SetInAt(0, Location::RequiresRegister());
1703 locations->SetInAt(1, Location::Any());
1704 break;
1705 case Primitive::kPrimFloat:
1706 case Primitive::kPrimDouble:
1707 locations->SetInAt(0, Location::RequiresFpuRegister());
1708 locations->SetInAt(1, Location::Any());
1709 break;
1710 default:
1711 locations->SetInAt(0, Location::RequiresRegister());
1712 locations->SetInAt(1, Location::Any());
1713 break;
1714 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001715 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001716 locations->SetOut(Location::RequiresRegister());
1717 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001718}
1719
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001720void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001721 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001722 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001723 }
Mark Mendellc4701932015-04-10 13:18:51 -04001724
1725 LocationSummary* locations = cond->GetLocations();
1726 Location lhs = locations->InAt(0);
1727 Location rhs = locations->InAt(1);
1728 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001729 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001730
1731 switch (cond->InputAt(0)->GetType()) {
1732 default:
1733 // Integer case.
1734
1735 // Clear output register: setcc only sets the low byte.
1736 __ xorl(reg, reg);
1737
1738 if (rhs.IsRegister()) {
1739 __ cmpl(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1740 } else if (rhs.IsConstant()) {
1741 int32_t constant = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
Aart Bika19616e2016-02-01 18:57:58 -08001742 codegen_->Compare32BitValue(lhs.AsRegister<CpuRegister>(), constant);
Mark Mendellc4701932015-04-10 13:18:51 -04001743 } else {
1744 __ cmpl(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1745 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001746 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001747 return;
1748 case Primitive::kPrimLong:
1749 // Clear output register: setcc only sets the low byte.
1750 __ xorl(reg, reg);
1751
1752 if (rhs.IsRegister()) {
1753 __ cmpq(lhs.AsRegister<CpuRegister>(), rhs.AsRegister<CpuRegister>());
1754 } else if (rhs.IsConstant()) {
1755 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001756 codegen_->Compare64BitValue(lhs.AsRegister<CpuRegister>(), value);
Mark Mendellc4701932015-04-10 13:18:51 -04001757 } else {
1758 __ cmpq(lhs.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
1759 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001760 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001761 return;
1762 case Primitive::kPrimFloat: {
1763 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1764 if (rhs.IsConstant()) {
1765 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1766 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1767 } else if (rhs.IsStackSlot()) {
1768 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1769 } else {
1770 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1771 }
1772 GenerateFPJumps(cond, &true_label, &false_label);
1773 break;
1774 }
1775 case Primitive::kPrimDouble: {
1776 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1777 if (rhs.IsConstant()) {
1778 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1779 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1780 } else if (rhs.IsDoubleStackSlot()) {
1781 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1782 } else {
1783 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1784 }
1785 GenerateFPJumps(cond, &true_label, &false_label);
1786 break;
1787 }
1788 }
1789
1790 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001791 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001792
Roland Levillain4fa13f62015-07-06 18:11:54 +01001793 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001794 __ Bind(&false_label);
1795 __ xorl(reg, reg);
1796 __ jmp(&done_label);
1797
Roland Levillain4fa13f62015-07-06 18:11:54 +01001798 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001799 __ Bind(&true_label);
1800 __ movl(reg, Immediate(1));
1801 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001802}
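// Note the ordering in the integer cases above: the output is cleared with xorl before the
// compare because xorl itself clobbers EFLAGS, and setcc only writes the low byte of the
// already-zeroed register. The FP cases cannot use a single setcc (the unordered result has
// to be folded in), hence the true/false/done label sequence instead.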
1803
1804void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001805 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001806}
1807
1808void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001809 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001810}
1811
1812void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001813 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001814}
1815
1816void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001817 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001818}
1819
1820void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001821 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001822}
1823
1824void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001825 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001826}
1827
1828void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001829 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001830}
1831
1832void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001833 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001834}
1835
1836void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001837 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001838}
1839
1840void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001841 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001842}
1843
1844void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001845 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001846}
1847
1848void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001849 HandleCondition(comp);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001850}
1851
Aart Bike9f37602015-10-09 11:15:55 -07001852void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001853 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001854}
1855
1856void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001857 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001858}
1859
1860void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001861 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001862}
1863
1864void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001865 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001866}
1867
1868void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001869 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001870}
1871
1872void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001873 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001874}
1875
1876void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001877 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001878}
1879
1880void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001881 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001882}
1883
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001884void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001885 LocationSummary* locations =
1886 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00001887 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001888 case Primitive::kPrimBoolean:
1889 case Primitive::kPrimByte:
1890 case Primitive::kPrimShort:
1891 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001892 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00001893 case Primitive::kPrimLong: {
1894 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001895 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001896 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1897 break;
1898 }
1899 case Primitive::kPrimFloat:
1900 case Primitive::kPrimDouble: {
1901 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04001902 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00001903 locations->SetOut(Location::RequiresRegister());
1904 break;
1905 }
1906 default:
1907 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
1908 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001909}
1910
1911void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001912 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00001913 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Calin Juravleddb7df22014-11-25 20:56:51 +00001914 Location left = locations->InAt(0);
1915 Location right = locations->InAt(1);
1916
Mark Mendell0c9497d2015-08-21 09:30:05 -04001917 NearLabel less, greater, done;
Calin Juravleddb7df22014-11-25 20:56:51 +00001918 Primitive::Type type = compare->InputAt(0)->GetType();
Aart Bika19616e2016-02-01 18:57:58 -08001919 Condition less_cond = kLess;
1920
Calin Juravleddb7df22014-11-25 20:56:51 +00001921 switch (type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001922 case Primitive::kPrimBoolean:
1923 case Primitive::kPrimByte:
1924 case Primitive::kPrimShort:
1925 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001926 case Primitive::kPrimInt: {
1927 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1928 if (right.IsConstant()) {
1929 int32_t value = right.GetConstant()->AsIntConstant()->GetValue();
1930 codegen_->Compare32BitValue(left_reg, value);
1931 } else if (right.IsStackSlot()) {
1932 __ cmpl(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1933 } else {
1934 __ cmpl(left_reg, right.AsRegister<CpuRegister>());
1935 }
1936 break;
1937 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001938 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001939 CpuRegister left_reg = left.AsRegister<CpuRegister>();
1940 if (right.IsConstant()) {
1941 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Aart Bika19616e2016-02-01 18:57:58 -08001942 codegen_->Compare64BitValue(left_reg, value);
Mark Mendell40741f32015-04-20 22:10:34 -04001943 } else if (right.IsDoubleStackSlot()) {
1944 __ cmpq(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04001945 } else {
1946 __ cmpq(left_reg, right.AsRegister<CpuRegister>());
1947 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001948 break;
Calin Juravleddb7df22014-11-25 20:56:51 +00001949 }
1950 case Primitive::kPrimFloat: {
Mark Mendell40741f32015-04-20 22:10:34 -04001951 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1952 if (right.IsConstant()) {
1953 float value = right.GetConstant()->AsFloatConstant()->GetValue();
1954 __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
1955 } else if (right.IsStackSlot()) {
1956 __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1957 } else {
1958 __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
1959 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001960 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001961 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001962 break;
1963 }
1964 case Primitive::kPrimDouble: {
Mark Mendell40741f32015-04-20 22:10:34 -04001965 XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
1966 if (right.IsConstant()) {
1967 double value = right.GetConstant()->AsDoubleConstant()->GetValue();
1968 __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
1969 } else if (right.IsDoubleStackSlot()) {
1970 __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
1971 } else {
1972 __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
1973 }
Calin Juravleddb7df22014-11-25 20:56:51 +00001974 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08001975 less_cond = kBelow; // ucomis{s,d} sets CF
Calin Juravleddb7df22014-11-25 20:56:51 +00001976 break;
1977 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001978 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00001979 LOG(FATAL) << "Unexpected compare type " << type;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001980 }
Aart Bika19616e2016-02-01 18:57:58 -08001981
Calin Juravleddb7df22014-11-25 20:56:51 +00001982 __ movl(out, Immediate(0));
Calin Juravle91debbc2014-11-26 19:01:09 +00001983 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08001984 __ j(less_cond, &less);
Calin Juravlefd861242014-11-25 20:56:51 +00001985
Calin Juravle91debbc2014-11-26 19:01:09 +00001986 __ Bind(&greater);
Calin Juravleddb7df22014-11-25 20:56:51 +00001987 __ movl(out, Immediate(1));
1988 __ jmp(&done);
1989
1990 __ Bind(&less);
1991 __ movl(out, Immediate(-1));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01001992
1993 __ Bind(&done);
1994}
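// The integer paths branch on kLess while the FP paths switch less_cond to kBelow, since
// ucomiss/ucomisd report through CF/ZF (unsigned-style flags) rather than the signed ones.
// NaN is steered straight to +1 or -1 according to the compare's bias, and the fall-through
// produces the usual -1 / 0 / +1 HCompare result.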
1995
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001996void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001997 LocationSummary* locations =
1998 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001999 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002000}
2001
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002002void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002003 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002004}
2005
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002006void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2007 LocationSummary* locations =
2008 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2009 locations->SetOut(Location::ConstantLocation(constant));
2010}
2011
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002012void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002013 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002014}
2015
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002016void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002017 LocationSummary* locations =
2018 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002019 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002020}
2021
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002022void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002023 // Will be generated at use site.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002024}
2025
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002026void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2027 LocationSummary* locations =
2028 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2029 locations->SetOut(Location::ConstantLocation(constant));
2030}
2031
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002032void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002033 // Will be generated at use site.
2034}
2035
2036void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2037 LocationSummary* locations =
2038 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2039 locations->SetOut(Location::ConstantLocation(constant));
2040}
2041
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002042void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
2043 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002044 // Will be generated at use site.
2045}
2046
Calin Juravle27df7582015-04-17 19:12:31 +01002047void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2048 memory_barrier->SetLocations(nullptr);
2049}
2050
2051void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002052 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002053}
2054
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002055void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2056 ret->SetLocations(nullptr);
2057}
2058
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002059void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002060 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002061}
2062
2063void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002064 LocationSummary* locations =
2065 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002066 switch (ret->InputAt(0)->GetType()) {
2067 case Primitive::kPrimBoolean:
2068 case Primitive::kPrimByte:
2069 case Primitive::kPrimChar:
2070 case Primitive::kPrimShort:
2071 case Primitive::kPrimInt:
2072 case Primitive::kPrimNot:
2073 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002074 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002075 break;
2076
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002077 case Primitive::kPrimFloat:
2078 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002079 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002080 break;
2081
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002082 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002083 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002084 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002085}
2086
2087void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2088 if (kIsDebugBuild) {
2089 switch (ret->InputAt(0)->GetType()) {
2090 case Primitive::kPrimBoolean:
2091 case Primitive::kPrimByte:
2092 case Primitive::kPrimChar:
2093 case Primitive::kPrimShort:
2094 case Primitive::kPrimInt:
2095 case Primitive::kPrimNot:
2096 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002097 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002098 break;
2099
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002100 case Primitive::kPrimFloat:
2101 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002102 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002103 XMM0);
2104 break;
2105
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002106 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002107 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002108 }
2109 }
2110 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002111}
2112
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002113Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2114 switch (type) {
2115 case Primitive::kPrimBoolean:
2116 case Primitive::kPrimByte:
2117 case Primitive::kPrimChar:
2118 case Primitive::kPrimShort:
2119 case Primitive::kPrimInt:
2120 case Primitive::kPrimNot:
2121 case Primitive::kPrimLong:
2122 return Location::RegisterLocation(RAX);
2123
2124 case Primitive::kPrimVoid:
2125 return Location::NoLocation();
2126
2127 case Primitive::kPrimDouble:
2128 case Primitive::kPrimFloat:
2129 return Location::FpuRegisterLocation(XMM0);
2130 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002131
2132 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002133}
2134
2135Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
2136 return Location::RegisterLocation(kMethodRegisterArgument);
2137}
2138
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002139Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
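  // Note: gp_index_ and float_index_ decide whether the next argument still fits
  // in a core/FP argument register; stack_index_ counts argument slots (a long or
  // double takes two) and feeds GetStackOffsetOf() for arguments passed on the stack.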
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002140 switch (type) {
2141 case Primitive::kPrimBoolean:
2142 case Primitive::kPrimByte:
2143 case Primitive::kPrimChar:
2144 case Primitive::kPrimShort:
2145 case Primitive::kPrimInt:
2146 case Primitive::kPrimNot: {
2147 uint32_t index = gp_index_++;
2148 stack_index_++;
2149 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002150 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002151 } else {
2152 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2153 }
2154 }
2155
2156 case Primitive::kPrimLong: {
2157 uint32_t index = gp_index_;
2158 stack_index_ += 2;
2159 if (index < calling_convention.GetNumberOfRegisters()) {
2160 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002161 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002162 } else {
2163 gp_index_ += 2;
2164 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2165 }
2166 }
2167
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002168 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002169 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002170 stack_index_++;
2171 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002172 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002173 } else {
2174 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2175 }
2176 }
2177
2178 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002179 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002180 stack_index_ += 2;
2181 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002182 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002183 } else {
2184 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2185 }
2186 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002187
2188 case Primitive::kPrimVoid:
2189 LOG(FATAL) << "Unexpected parameter type " << type;
2190 break;
2191 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002192 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002193}
2194
Calin Juravle175dc732015-08-25 15:42:32 +01002195void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
 2196  // The trampoline uses the same calling convention as the dex calling convention,
2197 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2198 // the method_idx.
2199 HandleInvoke(invoke);
2200}
2201
2202void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2203 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2204}
2205
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002206void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002207 // Explicit clinit checks triggered by static invokes must have been pruned by
2208 // art::PrepareForRegisterAllocation.
2209 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002210
Mark Mendellfb8d2792015-03-31 22:16:59 -04002211 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002212 if (intrinsic.TryDispatch(invoke)) {
2213 return;
2214 }
2215
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002216 HandleInvoke(invoke);
2217}
2218
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002219static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2220 if (invoke->GetLocations()->Intrinsified()) {
2221 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2222 intrinsic.Dispatch(invoke);
2223 return true;
2224 }
2225 return false;
2226}
2227
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002228void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002229 // Explicit clinit checks triggered by static invokes must have been pruned by
2230 // art::PrepareForRegisterAllocation.
2231 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002232
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002233 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2234 return;
2235 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002236
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002237 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002238 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002239 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002240 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002241}
2242
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002243void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002244 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002245 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002246}
2247
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002248void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002249 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002250 if (intrinsic.TryDispatch(invoke)) {
2251 return;
2252 }
2253
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002254 HandleInvoke(invoke);
2255}
2256
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002257void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002258 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2259 return;
2260 }
2261
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002262 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002263 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002264 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002265}
2266
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002267void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2268 HandleInvoke(invoke);
2269 // Add the hidden argument.
2270 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2271}
2272
2273void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2274 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002275 LocationSummary* locations = invoke->GetLocations();
2276 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
2277 CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray88f288e2016-06-29 08:17:52 +00002278 uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset(
2279 invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002280 Location receiver = locations->InAt(0);
2281 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
2282
Roland Levillain0d5a2812015-11-13 10:07:31 +00002283  // Set the hidden argument. It is safe to do this here, as RAX
2284 // won't be modified thereafter, before the `call` instruction.
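  // (The value loaded below is the interface method's dex method index; the IMT
  // conflict trampoline can use it to identify the intended interface method when
  // several methods share an IMT slot.)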
2285 DCHECK_EQ(RAX, hidden_reg.AsRegister());
Mark Mendell92e83bf2015-05-07 11:25:03 -04002286 codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002287
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002288 if (receiver.IsStackSlot()) {
2289 __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002290 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002291 __ movl(temp, Address(temp, class_offset));
2292 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002293 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002294 __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002295 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002296 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002297 // Instead of simply (possibly) unpoisoning `temp` here, we should
2298 // emit a read barrier for the previous class reference load.
 2299  // However, this is not required in practice, as this is an
2300 // intermediate/temporary reference and because the current
2301 // concurrent copying collector keeps the from-space memory
2302 // intact/accessible until the end of the marking phase (the
 2303  // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002304 __ MaybeUnpoisonHeapReference(temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002305 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002306 __ movq(temp, Address(temp, method_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002307 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002308 __ call(Address(temp,
2309 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize).SizeValue()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002310
2311 DCHECK(!codegen_->IsLeafMethod());
2312 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2313}
2314
Roland Levillain88cb1752014-10-20 16:36:47 +01002315void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2316 LocationSummary* locations =
2317 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2318 switch (neg->GetResultType()) {
2319 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002320 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002321 locations->SetInAt(0, Location::RequiresRegister());
2322 locations->SetOut(Location::SameAsFirstInput());
2323 break;
2324
Roland Levillain88cb1752014-10-20 16:36:47 +01002325 case Primitive::kPrimFloat:
2326 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002327 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002328 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002329 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002330 break;
2331
2332 default:
2333 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2334 }
2335}
2336
2337void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2338 LocationSummary* locations = neg->GetLocations();
2339 Location out = locations->Out();
2340 Location in = locations->InAt(0);
2341 switch (neg->GetResultType()) {
2342 case Primitive::kPrimInt:
2343 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002344 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002345 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002346 break;
2347
2348 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002349 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002350 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002351 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002352 break;
2353
Roland Levillain5368c212014-11-27 15:03:41 +00002354 case Primitive::kPrimFloat: {
2355 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002356 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002357 // Implement float negation with an exclusive or with value
2358 // 0x80000000 (mask for bit 31, representing the sign of a
2359 // single-precision floating-point number).
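      // For example, 1.0f (0x3F800000) XOR 0x80000000 yields 0xBF800000, i.e.
      // -1.0f; only the sign bit changes, so the magnitude (and any NaN payload)
      // is preserved.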
Mark Mendell40741f32015-04-20 22:10:34 -04002360 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002361 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002362 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002363 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002364
Roland Levillain5368c212014-11-27 15:03:41 +00002365 case Primitive::kPrimDouble: {
2366 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002367 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002368 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002369 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002370 // a double-precision floating-point number).
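      // (Same XOR trick as the float case above, with the sign bit now at
      // position 63 of the 64-bit pattern.)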
Mark Mendell40741f32015-04-20 22:10:34 -04002371 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002372 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002373 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002374 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002375
2376 default:
2377 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2378 }
2379}
2380
Roland Levillaindff1f282014-11-05 14:15:05 +00002381void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2382 LocationSummary* locations =
2383 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2384 Primitive::Type result_type = conversion->GetResultType();
2385 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002386 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002387
David Brazdilb2bd1c52015-03-25 11:17:37 +00002388 // The Java language does not allow treating boolean as an integral type but
2389 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002390
Roland Levillaindff1f282014-11-05 14:15:05 +00002391 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002392 case Primitive::kPrimByte:
2393 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002394 case Primitive::kPrimLong:
2395 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002396 case Primitive::kPrimBoolean:
2397 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002398 case Primitive::kPrimShort:
2399 case Primitive::kPrimInt:
2400 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002401 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002402 locations->SetInAt(0, Location::Any());
2403 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2404 break;
2405
2406 default:
2407 LOG(FATAL) << "Unexpected type conversion from " << input_type
2408 << " to " << result_type;
2409 }
2410 break;
2411
Roland Levillain01a8d712014-11-14 16:27:39 +00002412 case Primitive::kPrimShort:
2413 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002414 case Primitive::kPrimLong:
2415 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002416 case Primitive::kPrimBoolean:
2417 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002418 case Primitive::kPrimByte:
2419 case Primitive::kPrimInt:
2420 case Primitive::kPrimChar:
2421 // Processing a Dex `int-to-short' instruction.
2422 locations->SetInAt(0, Location::Any());
2423 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2424 break;
2425
2426 default:
2427 LOG(FATAL) << "Unexpected type conversion from " << input_type
2428 << " to " << result_type;
2429 }
2430 break;
2431
Roland Levillain946e1432014-11-11 17:35:19 +00002432 case Primitive::kPrimInt:
2433 switch (input_type) {
2434 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002435 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002436 locations->SetInAt(0, Location::Any());
2437 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2438 break;
2439
2440 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002441 // Processing a Dex `float-to-int' instruction.
2442 locations->SetInAt(0, Location::RequiresFpuRegister());
2443 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002444 break;
2445
Roland Levillain946e1432014-11-11 17:35:19 +00002446 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002447 // Processing a Dex `double-to-int' instruction.
2448 locations->SetInAt(0, Location::RequiresFpuRegister());
2449 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002450 break;
2451
2452 default:
2453 LOG(FATAL) << "Unexpected type conversion from " << input_type
2454 << " to " << result_type;
2455 }
2456 break;
2457
Roland Levillaindff1f282014-11-05 14:15:05 +00002458 case Primitive::kPrimLong:
2459 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002460 case Primitive::kPrimBoolean:
2461 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002462 case Primitive::kPrimByte:
2463 case Primitive::kPrimShort:
2464 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002465 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002466 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002467 // TODO: We would benefit from a (to-be-implemented)
2468 // Location::RegisterOrStackSlot requirement for this input.
2469 locations->SetInAt(0, Location::RequiresRegister());
2470 locations->SetOut(Location::RequiresRegister());
2471 break;
2472
2473 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002474 // Processing a Dex `float-to-long' instruction.
2475 locations->SetInAt(0, Location::RequiresFpuRegister());
2476 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002477 break;
2478
Roland Levillaindff1f282014-11-05 14:15:05 +00002479 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002480 // Processing a Dex `double-to-long' instruction.
2481 locations->SetInAt(0, Location::RequiresFpuRegister());
2482 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002483 break;
2484
2485 default:
2486 LOG(FATAL) << "Unexpected type conversion from " << input_type
2487 << " to " << result_type;
2488 }
2489 break;
2490
Roland Levillain981e4542014-11-14 11:47:14 +00002491 case Primitive::kPrimChar:
2492 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002493 case Primitive::kPrimLong:
2494 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002495 case Primitive::kPrimBoolean:
2496 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002497 case Primitive::kPrimByte:
2498 case Primitive::kPrimShort:
2499 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002500 // Processing a Dex `int-to-char' instruction.
2501 locations->SetInAt(0, Location::Any());
2502 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2503 break;
2504
2505 default:
2506 LOG(FATAL) << "Unexpected type conversion from " << input_type
2507 << " to " << result_type;
2508 }
2509 break;
2510
Roland Levillaindff1f282014-11-05 14:15:05 +00002511 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002512 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002513 case Primitive::kPrimBoolean:
2514 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002515 case Primitive::kPrimByte:
2516 case Primitive::kPrimShort:
2517 case Primitive::kPrimInt:
2518 case Primitive::kPrimChar:
2519 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002520 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002521 locations->SetOut(Location::RequiresFpuRegister());
2522 break;
2523
2524 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002525 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002526 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002527 locations->SetOut(Location::RequiresFpuRegister());
2528 break;
2529
Roland Levillaincff13742014-11-17 14:32:17 +00002530 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002531 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002532 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002533 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002534 break;
2535
2536 default:
2537 LOG(FATAL) << "Unexpected type conversion from " << input_type
2538 << " to " << result_type;
 2539       }
2540 break;
2541
Roland Levillaindff1f282014-11-05 14:15:05 +00002542 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002543 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002544 case Primitive::kPrimBoolean:
2545 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002546 case Primitive::kPrimByte:
2547 case Primitive::kPrimShort:
2548 case Primitive::kPrimInt:
2549 case Primitive::kPrimChar:
2550 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002551 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002552 locations->SetOut(Location::RequiresFpuRegister());
2553 break;
2554
2555 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002556 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002557 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002558 locations->SetOut(Location::RequiresFpuRegister());
2559 break;
2560
Roland Levillaincff13742014-11-17 14:32:17 +00002561 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002562 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002563 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002564 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002565 break;
2566
2567 default:
2568 LOG(FATAL) << "Unexpected type conversion from " << input_type
2569 << " to " << result_type;
2570 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002571 break;
2572
2573 default:
2574 LOG(FATAL) << "Unexpected type conversion from " << input_type
2575 << " to " << result_type;
2576 }
2577}
2578
2579void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2580 LocationSummary* locations = conversion->GetLocations();
2581 Location out = locations->Out();
2582 Location in = locations->InAt(0);
2583 Primitive::Type result_type = conversion->GetResultType();
2584 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002585 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002586 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002587 case Primitive::kPrimByte:
2588 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002589 case Primitive::kPrimLong:
2590 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002591 case Primitive::kPrimBoolean:
2592 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002593 case Primitive::kPrimShort:
2594 case Primitive::kPrimInt:
2595 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002596 // Processing a Dex `int-to-byte' instruction.
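          // For example, an input of 384 (0x00000180) keeps its low byte 0x80,
          // which is sign-extended to -128, matching Java's (byte) cast.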
Roland Levillain51d3fc42014-11-13 14:11:42 +00002597 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002598 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002599 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002600 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002601 Address(CpuRegister(RSP), in.GetStackIndex()));
2602 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002603 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002604 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002605 }
2606 break;
2607
2608 default:
2609 LOG(FATAL) << "Unexpected type conversion from " << input_type
2610 << " to " << result_type;
2611 }
2612 break;
2613
Roland Levillain01a8d712014-11-14 16:27:39 +00002614 case Primitive::kPrimShort:
2615 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002616 case Primitive::kPrimLong:
2617 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002618 case Primitive::kPrimBoolean:
2619 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002620 case Primitive::kPrimByte:
2621 case Primitive::kPrimInt:
2622 case Primitive::kPrimChar:
2623 // Processing a Dex `int-to-short' instruction.
2624 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002625 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002626 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002627 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002628 Address(CpuRegister(RSP), in.GetStackIndex()));
2629 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002630 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002631 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002632 }
2633 break;
2634
2635 default:
2636 LOG(FATAL) << "Unexpected type conversion from " << input_type
2637 << " to " << result_type;
2638 }
2639 break;
2640
Roland Levillain946e1432014-11-11 17:35:19 +00002641 case Primitive::kPrimInt:
2642 switch (input_type) {
2643 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002644 // Processing a Dex `long-to-int' instruction.
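          // Only the low 32 bits are kept; on x86-64 a 32-bit movl also clears
          // the upper half of the destination register.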
Roland Levillain946e1432014-11-11 17:35:19 +00002645 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002646 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002647 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002648 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002649 Address(CpuRegister(RSP), in.GetStackIndex()));
2650 } else {
2651 DCHECK(in.IsConstant());
2652 DCHECK(in.GetConstant()->IsLongConstant());
2653 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002654 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002655 }
2656 break;
2657
Roland Levillain3f8f9362014-12-02 17:45:01 +00002658 case Primitive::kPrimFloat: {
2659 // Processing a Dex `float-to-int' instruction.
2660 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2661 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002662 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002663
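          // Java (JLS 5.1.3) maps NaN to 0 and saturates out-of-range values to
          // INT_MIN/INT_MAX. cvttss2si already returns 0x80000000 (INT_MIN) for NaN
          // and for any out-of-range input, so only the NaN and >= INT_MAX cases
          // need the explicit fix-ups below; negative overflow is correct as-is.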
2664 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002665 // if input >= (float)INT_MAX goto done
2666 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002667 __ j(kAboveEqual, &done);
2668 // if input == NaN goto nan
2669 __ j(kUnordered, &nan);
2670 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002671 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002672 __ jmp(&done);
2673 __ Bind(&nan);
2674 // output = 0
2675 __ xorl(output, output);
2676 __ Bind(&done);
2677 break;
2678 }
2679
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002680 case Primitive::kPrimDouble: {
2681 // Processing a Dex `double-to-int' instruction.
2682 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2683 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002684 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002685
2686 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002687 // if input >= (double)INT_MAX goto done
2688 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002689 __ j(kAboveEqual, &done);
2690 // if input == NaN goto nan
2691 __ j(kUnordered, &nan);
2692 // output = double-to-int-truncate(input)
2693 __ cvttsd2si(output, input);
2694 __ jmp(&done);
2695 __ Bind(&nan);
2696 // output = 0
2697 __ xorl(output, output);
2698 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002699 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002700 }
Roland Levillain946e1432014-11-11 17:35:19 +00002701
2702 default:
2703 LOG(FATAL) << "Unexpected type conversion from " << input_type
2704 << " to " << result_type;
2705 }
2706 break;
2707
Roland Levillaindff1f282014-11-05 14:15:05 +00002708 case Primitive::kPrimLong:
 2709      DCHECK(out.IsRegister());
 2710      switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002711 case Primitive::kPrimBoolean:
2712 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002713 case Primitive::kPrimByte:
2714 case Primitive::kPrimShort:
2715 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002716 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002717 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002718 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002719 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002720 break;
2721
Roland Levillain624279f2014-12-04 11:54:28 +00002722 case Primitive::kPrimFloat: {
2723 // Processing a Dex `float-to-long' instruction.
2724 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2725 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002726 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002727
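          // Same fix-up pattern as float-to-int above: the 64-bit cvttss2si
          // indefinite value 0x8000000000000000 already equals Long.MIN_VALUE, so
          // again only NaN and the upper bound need explicit handling.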
Mark Mendell92e83bf2015-05-07 11:25:03 -04002728 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002729 // if input >= (float)LONG_MAX goto done
2730 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002731 __ j(kAboveEqual, &done);
2732 // if input == NaN goto nan
2733 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002734 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002735 __ cvttss2si(output, input, true);
2736 __ jmp(&done);
2737 __ Bind(&nan);
2738 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002739 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002740 __ Bind(&done);
2741 break;
2742 }
2743
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002744 case Primitive::kPrimDouble: {
2745 // Processing a Dex `double-to-long' instruction.
2746 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2747 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002748 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002749
Mark Mendell92e83bf2015-05-07 11:25:03 -04002750 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002751 // if input >= (double)LONG_MAX goto done
2752 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002753 __ j(kAboveEqual, &done);
2754 // if input == NaN goto nan
2755 __ j(kUnordered, &nan);
2756 // output = double-to-long-truncate(input)
2757 __ cvttsd2si(output, input, true);
2758 __ jmp(&done);
2759 __ Bind(&nan);
2760 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002761 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002762 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002763 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002764 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002765
2766 default:
2767 LOG(FATAL) << "Unexpected type conversion from " << input_type
2768 << " to " << result_type;
2769 }
2770 break;
2771
Roland Levillain981e4542014-11-14 11:47:14 +00002772 case Primitive::kPrimChar:
2773 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002774 case Primitive::kPrimLong:
2775 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002776 case Primitive::kPrimBoolean:
2777 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002778 case Primitive::kPrimByte:
2779 case Primitive::kPrimShort:
2780 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002781 // Processing a Dex `int-to-char' instruction.
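          // For example, an input of -1 (0xFFFFFFFF) keeps its low 16 bits, which
          // are zero-extended to 65535, matching Java's (char) cast.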
2782 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002783 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002784 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002785 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002786 Address(CpuRegister(RSP), in.GetStackIndex()));
2787 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002788 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002789 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002790 }
2791 break;
2792
2793 default:
2794 LOG(FATAL) << "Unexpected type conversion from " << input_type
2795 << " to " << result_type;
2796 }
2797 break;
2798
Roland Levillaindff1f282014-11-05 14:15:05 +00002799 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002800 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002801 case Primitive::kPrimBoolean:
2802 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002803 case Primitive::kPrimByte:
2804 case Primitive::kPrimShort:
2805 case Primitive::kPrimInt:
2806 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002807 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002808 if (in.IsRegister()) {
2809 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2810 } else if (in.IsConstant()) {
2811 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2812 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002813 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002814 } else {
2815 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2816 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2817 }
Roland Levillaincff13742014-11-17 14:32:17 +00002818 break;
2819
2820 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002821 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002822 if (in.IsRegister()) {
2823 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2824 } else if (in.IsConstant()) {
2825 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2826 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002827 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002828 } else {
2829 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2830 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2831 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002832 break;
2833
Roland Levillaincff13742014-11-17 14:32:17 +00002834 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002835 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002836 if (in.IsFpuRegister()) {
2837 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2838 } else if (in.IsConstant()) {
2839 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2840 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002841 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002842 } else {
2843 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2844 Address(CpuRegister(RSP), in.GetStackIndex()));
2845 }
Roland Levillaincff13742014-11-17 14:32:17 +00002846 break;
2847
2848 default:
2849 LOG(FATAL) << "Unexpected type conversion from " << input_type
2850 << " to " << result_type;
 2851       }
2852 break;
2853
Roland Levillaindff1f282014-11-05 14:15:05 +00002854 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002855 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002856 case Primitive::kPrimBoolean:
2857 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002858 case Primitive::kPrimByte:
2859 case Primitive::kPrimShort:
2860 case Primitive::kPrimInt:
2861 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002862 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002863 if (in.IsRegister()) {
2864 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2865 } else if (in.IsConstant()) {
2866 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2867 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002868 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002869 } else {
2870 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2871 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2872 }
Roland Levillaincff13742014-11-17 14:32:17 +00002873 break;
2874
2875 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002876 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002877 if (in.IsRegister()) {
2878 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2879 } else if (in.IsConstant()) {
2880 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2881 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002882 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002883 } else {
2884 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2885 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2886 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002887 break;
2888
Roland Levillaincff13742014-11-17 14:32:17 +00002889 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002890 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002891 if (in.IsFpuRegister()) {
2892 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2893 } else if (in.IsConstant()) {
2894 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2895 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002896 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002897 } else {
2898 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2899 Address(CpuRegister(RSP), in.GetStackIndex()));
2900 }
Roland Levillaincff13742014-11-17 14:32:17 +00002901 break;
2902
2903 default:
2904 LOG(FATAL) << "Unexpected type conversion from " << input_type
2905 << " to " << result_type;
 2906       }
Roland Levillaindff1f282014-11-05 14:15:05 +00002907 break;
2908
2909 default:
2910 LOG(FATAL) << "Unexpected type conversion from " << input_type
2911 << " to " << result_type;
2912 }
2913}
2914
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002915void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002916 LocationSummary* locations =
2917 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002918 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002919 case Primitive::kPrimInt: {
2920 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002921 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2922 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002923 break;
2924 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002925
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002926 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002927 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002928 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002929 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002930 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002931 break;
2932 }
2933
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002934 case Primitive::kPrimDouble:
2935 case Primitive::kPrimFloat: {
2936 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002937 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002938 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002939 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002940 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002941
2942 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002943 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002944 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002945}
2946
2947void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2948 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002949 Location first = locations->InAt(0);
2950 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002951 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002952
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002953 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002954 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002955 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002956 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2957 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002958 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2959 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002960 } else {
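          // Neither input aliases the output, so use leal as a three-operand add:
          // it writes first + second into `out` without clobbering either input.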
2961 __ leal(out.AsRegister<CpuRegister>(), Address(
2962 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2963 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002964 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002965 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2966 __ addl(out.AsRegister<CpuRegister>(),
2967 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2968 } else {
2969 __ leal(out.AsRegister<CpuRegister>(), Address(
2970 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2971 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002972 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002973 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002974 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002975 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002976 break;
2977 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002978
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002979 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002980 if (second.IsRegister()) {
2981 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2982 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002983 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2984 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002985 } else {
2986 __ leaq(out.AsRegister<CpuRegister>(), Address(
2987 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2988 }
2989 } else {
2990 DCHECK(second.IsConstant());
2991 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2992 int32_t int32_value = Low32Bits(value);
2993 DCHECK_EQ(int32_value, value);
2994 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2995 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2996 } else {
2997 __ leaq(out.AsRegister<CpuRegister>(), Address(
2998 first.AsRegister<CpuRegister>(), int32_value));
2999 }
3000 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003001 break;
3002 }
3003
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003004 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003005 if (second.IsFpuRegister()) {
3006 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3007 } else if (second.IsConstant()) {
3008 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003009 codegen_->LiteralFloatAddress(
3010 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003011 } else {
3012 DCHECK(second.IsStackSlot());
3013 __ addss(first.AsFpuRegister<XmmRegister>(),
3014 Address(CpuRegister(RSP), second.GetStackIndex()));
3015 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003016 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003017 }
3018
3019 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003020 if (second.IsFpuRegister()) {
3021 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3022 } else if (second.IsConstant()) {
3023 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003024 codegen_->LiteralDoubleAddress(
3025 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003026 } else {
3027 DCHECK(second.IsDoubleStackSlot());
3028 __ addsd(first.AsFpuRegister<XmmRegister>(),
3029 Address(CpuRegister(RSP), second.GetStackIndex()));
3030 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003031 break;
3032 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003033
3034 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003035 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003036 }
3037}
3038
3039void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003040 LocationSummary* locations =
3041 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003042 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003043 case Primitive::kPrimInt: {
3044 locations->SetInAt(0, Location::RequiresRegister());
3045 locations->SetInAt(1, Location::Any());
3046 locations->SetOut(Location::SameAsFirstInput());
3047 break;
3048 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003049 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003050 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003051 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003052 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003053 break;
3054 }
Calin Juravle11351682014-10-23 15:38:15 +01003055 case Primitive::kPrimFloat:
3056 case Primitive::kPrimDouble: {
3057 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003058 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003059 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003060 break;
Calin Juravle11351682014-10-23 15:38:15 +01003061 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003062 default:
Calin Juravle11351682014-10-23 15:38:15 +01003063 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003064 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003065}
3066
3067void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3068 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003069 Location first = locations->InAt(0);
3070 Location second = locations->InAt(1);
3071 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003072 switch (sub->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003073 case Primitive::kPrimInt: {
Calin Juravle11351682014-10-23 15:38:15 +01003074 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003075 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003076 } else if (second.IsConstant()) {
3077 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003078 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003079 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003080 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003081 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003082 break;
3083 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003084 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003085 if (second.IsConstant()) {
3086 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3087 DCHECK(IsInt<32>(value));
3088 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3089 } else {
3090 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3091 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003092 break;
3093 }
3094
Calin Juravle11351682014-10-23 15:38:15 +01003095 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003096 if (second.IsFpuRegister()) {
3097 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3098 } else if (second.IsConstant()) {
3099 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003100 codegen_->LiteralFloatAddress(
3101 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003102 } else {
3103 DCHECK(second.IsStackSlot());
3104 __ subss(first.AsFpuRegister<XmmRegister>(),
3105 Address(CpuRegister(RSP), second.GetStackIndex()));
3106 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003107 break;
Calin Juravle11351682014-10-23 15:38:15 +01003108 }
3109
3110 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003111 if (second.IsFpuRegister()) {
3112 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3113 } else if (second.IsConstant()) {
3114 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003115 codegen_->LiteralDoubleAddress(
3116 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003117 } else {
3118 DCHECK(second.IsDoubleStackSlot());
3119 __ subsd(first.AsFpuRegister<XmmRegister>(),
3120 Address(CpuRegister(RSP), second.GetStackIndex()));
3121 }
Calin Juravle11351682014-10-23 15:38:15 +01003122 break;
3123 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003124
3125 default:
Calin Juravle11351682014-10-23 15:38:15 +01003126 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003127 }
3128}
3129
Calin Juravle34bacdf2014-10-07 20:23:36 +01003130void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3131 LocationSummary* locations =
3132 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3133 switch (mul->GetResultType()) {
3134 case Primitive::kPrimInt: {
3135 locations->SetInAt(0, Location::RequiresRegister());
3136 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003137 if (mul->InputAt(1)->IsIntConstant()) {
3138 // Can use 3 operand multiply.
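// (imul with an immediate writes directly to its destination register, so the
// output does not have to overlap the first input.)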
3139 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3140 } else {
3141 locations->SetOut(Location::SameAsFirstInput());
3142 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003143 break;
3144 }
3145 case Primitive::kPrimLong: {
3146 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003147 locations->SetInAt(1, Location::Any());
3148 if (mul->InputAt(1)->IsLongConstant() &&
3149 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003150 // Can use 3 operand multiply.
3151 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3152 } else {
3153 locations->SetOut(Location::SameAsFirstInput());
3154 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003155 break;
3156 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003157 case Primitive::kPrimFloat:
3158 case Primitive::kPrimDouble: {
3159 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003160 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003161 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003162 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003163 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003164
3165 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003166 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003167 }
3168}
3169
3170void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
3171 LocationSummary* locations = mul->GetLocations();
3172 Location first = locations->InAt(0);
3173 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003174 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003175 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003176 case Primitive::kPrimInt:
3177 // The constant may have ended up in a register, so test explicitly to avoid
3178 // problems where the output may not be the same as the first operand.
3179 if (mul->InputAt(1)->IsIntConstant()) {
3180 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3181 __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
3182 } else if (second.IsRegister()) {
3183 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003184 __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003185 } else {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003186 DCHECK(first.Equals(out));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003187 DCHECK(second.IsStackSlot());
Roland Levillain199f3362014-11-27 17:15:16 +00003188 __ imull(first.AsRegister<CpuRegister>(),
3189 Address(CpuRegister(RSP), second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003190 }
3191 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003192 case Primitive::kPrimLong: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003193 // The constant may have ended up in a register, so test explicitly to avoid
3194 // problems where the output may not be the same as the first operand.
3195 if (mul->InputAt(1)->IsLongConstant()) {
3196 int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
3197 if (IsInt<32>(value)) {
3198 __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
3199 Immediate(static_cast<int32_t>(value)));
3200 } else {
3201 // Have to use the constant area.
3202 DCHECK(first.Equals(out));
3203 __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
3204 }
3205 } else if (second.IsRegister()) {
3206 DCHECK(first.Equals(out));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003207 __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003208 } else {
3209 DCHECK(second.IsDoubleStackSlot());
3210 DCHECK(first.Equals(out));
3211 __ imulq(first.AsRegister<CpuRegister>(),
3212 Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003213 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003214 break;
3215 }
3216
Calin Juravleb5bfa962014-10-21 18:02:24 +01003217 case Primitive::kPrimFloat: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003218 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003219 if (second.IsFpuRegister()) {
3220 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3221 } else if (second.IsConstant()) {
3222 __ mulss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003223 codegen_->LiteralFloatAddress(
3224 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003225 } else {
3226 DCHECK(second.IsStackSlot());
3227 __ mulss(first.AsFpuRegister<XmmRegister>(),
3228 Address(CpuRegister(RSP), second.GetStackIndex()));
3229 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003230 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003231 }
3232
3233 case Primitive::kPrimDouble: {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003234 DCHECK(first.Equals(out));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003235 if (second.IsFpuRegister()) {
3236 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3237 } else if (second.IsConstant()) {
3238 __ mulsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003239 codegen_->LiteralDoubleAddress(
3240 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003241 } else {
3242 DCHECK(second.IsDoubleStackSlot());
3243 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3244 Address(CpuRegister(RSP), second.GetStackIndex()));
3245 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003246 break;
3247 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003248
3249 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003250 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003251 }
3252}
3253
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003254void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3255 uint32_t stack_adjustment, bool is_float) {
3256 if (source.IsStackSlot()) {
3257 DCHECK(is_float);
3258 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3259 } else if (source.IsDoubleStackSlot()) {
3260 DCHECK(!is_float);
3261 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3262 } else {
3263 // Write the value to the temporary location on the stack and load to FP stack.
3264 if (is_float) {
3265 Location stack_temp = Location::StackSlot(temp_offset);
3266 codegen_->Move(stack_temp, source);
3267 __ flds(Address(CpuRegister(RSP), temp_offset));
3268 } else {
3269 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3270 codegen_->Move(stack_temp, source);
3271 __ fldl(Address(CpuRegister(RSP), temp_offset));
3272 }
3273 }
3274}
3275
3276void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
3277 Primitive::Type type = rem->GetResultType();
3278 bool is_float = type == Primitive::kPrimFloat;
3279 size_t elem_size = Primitive::ComponentSize(type);
3280 LocationSummary* locations = rem->GetLocations();
3281 Location first = locations->InAt(0);
3282 Location second = locations->InAt(1);
3283 Location out = locations->Out();
3284
3285 // Create stack space for 2 elements.
3286 // TODO: enhance register allocator to ask for stack temporaries.
3287 __ subq(CpuRegister(RSP), Immediate(2 * elem_size));
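// The scratch area holds register operands on their way onto the x87 stack and, at
// the end, the x87 result on its way back to an XMM register.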
3288
3289 // Load the values to the FP stack in reverse order, using temporaries if needed.
3290 PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
3291 PushOntoFPStack(first, 0, 2 * elem_size, is_float);
3292
3293 // Loop doing FPREM until we stabilize.
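// fprem computes a partial remainder: each iteration reduces the exponent difference
// between the operands by at most 63, and the FPU keeps the C2 status flag set while
// further iterations are needed.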
Mark Mendell0c9497d2015-08-21 09:30:05 -04003294 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003295 __ Bind(&retry);
3296 __ fprem();
3297
3298 // Move FP status to AX.
3299 __ fstsw();
3300
3301 // And see if the argument reduction is complete. This is signaled by the
3302 // C2 FPU flag bit set to 0.
3303 __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
3304 __ j(kNotEqual, &retry);
3305
3306 // We have settled on the final value. Retrieve it into an XMM register.
3307 // Store FP top of stack to real stack.
3308 if (is_float) {
3309 __ fsts(Address(CpuRegister(RSP), 0));
3310 } else {
3311 __ fstl(Address(CpuRegister(RSP), 0));
3312 }
3313
3314 // Pop the 2 items from the FP stack.
3315 __ fucompp();
3316
3317 // Load the value from the stack into an XMM register.
3318 DCHECK(out.IsFpuRegister()) << out;
3319 if (is_float) {
3320 __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3321 } else {
3322 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
3323 }
3324
3325 // And remove the temporary stack space we allocated.
3326 __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
3327}
3328
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003329void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3330 DCHECK(instruction->IsDiv() || instruction->IsRem());
3331
3332 LocationSummary* locations = instruction->GetLocations();
3333 Location second = locations->InAt(1);
3334 DCHECK(second.IsConstant());
3335
3336 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3337 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003338 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003339
3340 DCHECK(imm == 1 || imm == -1);
3341
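// x / 1 == x, x / -1 == -x and x % +/-1 == 0, so only a move (plus a negate for -1)
// or a zeroing xor is needed.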
3342 switch (instruction->GetResultType()) {
3343 case Primitive::kPrimInt: {
3344 if (instruction->IsRem()) {
3345 __ xorl(output_register, output_register);
3346 } else {
3347 __ movl(output_register, input_register);
3348 if (imm == -1) {
3349 __ negl(output_register);
3350 }
3351 }
3352 break;
3353 }
3354
3355 case Primitive::kPrimLong: {
3356 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003357 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003358 } else {
3359 __ movq(output_register, input_register);
3360 if (imm == -1) {
3361 __ negq(output_register);
3362 }
3363 }
3364 break;
3365 }
3366
3367 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003368 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003369 }
3370}
3371
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003372void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003373 LocationSummary* locations = instruction->GetLocations();
3374 Location second = locations->InAt(1);
3375
3376 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3377 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3378
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003379 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003380 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3381 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003382
3383 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3384
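// Signed division by +/-2^k that rounds toward zero: bias a negative numerator by
// (2^k - 1) before the arithmetic shift, roughly tmp = (n >= 0 ? n : n + 2^k - 1) >> k,
// then negate the result when the divisor is negative. The lea/test/cmov sequences
// below select the biased value without a branch.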
3385 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003386 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003387 __ testl(numerator, numerator);
3388 __ cmov(kGreaterEqual, tmp, numerator);
3389 int shift = CTZ(imm);
3390 __ sarl(tmp, Immediate(shift));
3391
3392 if (imm < 0) {
3393 __ negl(tmp);
3394 }
3395
3396 __ movl(output_register, tmp);
3397 } else {
3398 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3399 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3400
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003401 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003402 __ addq(rdx, numerator);
3403 __ testq(numerator, numerator);
3404 __ cmov(kGreaterEqual, rdx, numerator);
3405 int shift = CTZ(imm);
3406 __ sarq(rdx, Immediate(shift));
3407
3408 if (imm < 0) {
3409 __ negq(rdx);
3410 }
3411
3412 __ movq(output_register, rdx);
3413 }
3414}
3415
3416void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3417 DCHECK(instruction->IsDiv() || instruction->IsRem());
3418
3419 LocationSummary* locations = instruction->GetLocations();
3420 Location second = locations->InAt(1);
3421
3422 CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
3423 : locations->GetTemp(0).AsRegister<CpuRegister>();
3424 CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
3425 CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
3426 : locations->Out().AsRegister<CpuRegister>();
3427 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3428
3429 DCHECK_EQ(RAX, eax.AsRegister());
3430 DCHECK_EQ(RDX, edx.AsRegister());
3431 if (instruction->IsDiv()) {
3432 DCHECK_EQ(RAX, out.AsRegister());
3433 } else {
3434 DCHECK_EQ(RDX, out.AsRegister());
3435 }
3436
3437 int64_t magic;
3438 int shift;
3439
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003440 // TODO: can these branches be written as one?
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003441 if (instruction->GetResultType() == Primitive::kPrimInt) {
3442 int imm = second.GetConstant()->AsIntConstant()->GetValue();
3443
3444 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
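// The quotient is hi32(magic * n), corrected by +/- n when the signs of 'magic' and
// the divisor differ, arithmetically shifted right by 'shift', plus the quotient's
// sign bit so the result rounds toward zero. For instance, a divisor of 7 typically
// yields magic = 0x92492493 and shift = 2 (the usual Hacker's Delight constants).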
3445
3446 __ movl(numerator, eax);
3447
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003448 __ movl(eax, Immediate(magic));
3449 __ imull(numerator);
3450
3451 if (imm > 0 && magic < 0) {
3452 __ addl(edx, numerator);
3453 } else if (imm < 0 && magic > 0) {
3454 __ subl(edx, numerator);
3455 }
3456
3457 if (shift != 0) {
3458 __ sarl(edx, Immediate(shift));
3459 }
3460
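// edx += 1 if edx < 0, i.e. add the quotient's sign bit so it rounds toward zero.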
3461 __ movl(eax, edx);
3462 __ shrl(edx, Immediate(31));
3463 __ addl(edx, eax);
3464
3465 if (instruction->IsRem()) {
3466 __ movl(eax, numerator);
3467 __ imull(edx, Immediate(imm));
3468 __ subl(eax, edx);
3469 __ movl(edx, eax);
3470 } else {
3471 __ movl(eax, edx);
3472 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003473 } else {
3474 int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();
3475
3476 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3477
3478 CpuRegister rax = eax;
3479 CpuRegister rdx = edx;
3480
3481 CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);
3482
3483 // Save the numerator.
3484 __ movq(numerator, rax);
3485
3486 // RAX = magic
Mark Mendell92e83bf2015-05-07 11:25:03 -04003487 codegen_->Load64BitValue(rax, magic);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003488
3489 // RDX:RAX = magic * numerator
3490 __ imulq(numerator);
3491
3492 if (imm > 0 && magic < 0) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003493 // RDX += numerator
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003494 __ addq(rdx, numerator);
3495 } else if (imm < 0 && magic > 0) {
3496 // RDX -= numerator
3497 __ subq(rdx, numerator);
3498 }
3499
3500 // Shift if needed.
3501 if (shift != 0) {
3502 __ sarq(rdx, Immediate(shift));
3503 }
3504
3505 // RDX += 1 if RDX < 0
3506 __ movq(rax, rdx);
3507 __ shrq(rdx, Immediate(63));
3508 __ addq(rdx, rax);
3509
3510 if (instruction->IsRem()) {
3511 __ movq(rax, numerator);
3512
3513 if (IsInt<32>(imm)) {
3514 __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
3515 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003516 __ imulq(rdx, codegen_->LiteralInt64Address(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003517 }
3518
3519 __ subq(rax, rdx);
3520 __ movq(rdx, rax);
3521 } else {
3522 __ movq(rax, rdx);
3523 }
3524 }
3525}
3526
Calin Juravlebacfec32014-11-14 15:54:36 +00003527void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3528 DCHECK(instruction->IsDiv() || instruction->IsRem());
3529 Primitive::Type type = instruction->GetResultType();
3530 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3531
3532 bool is_div = instruction->IsDiv();
3533 LocationSummary* locations = instruction->GetLocations();
3534
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003535 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3536 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003537
Roland Levillain271ab9c2014-11-27 15:23:57 +00003538 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003539 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003540
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003541 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003542 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003543
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003544 if (imm == 0) {
3545 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3546 } else if (imm == 1 || imm == -1) {
3547 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003548 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003549 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003550 } else {
3551 DCHECK(imm <= -2 || imm >= 2);
3552 GenerateDivRemWithAnyConstant(instruction);
3553 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003554 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003555 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003556 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003557 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003558 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003559
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003560 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3561 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3562 // Dividing by -1 is actually negation and -0x80000000(00000000) = 0x80000000(00000000),
3563 // so it's safe to just use negl instead of more complex comparisons.
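// (Java requires MIN_VALUE / -1 == MIN_VALUE and MIN_VALUE % -1 == 0; the slow path
// produces those values instead of letting idiv raise the exception.)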
3564 if (type == Primitive::kPrimInt) {
3565 __ cmpl(second_reg, Immediate(-1));
3566 __ j(kEqual, slow_path->GetEntryLabel());
3567 // edx:eax <- sign-extended of eax
3568 __ cdq();
3569 // eax = quotient, edx = remainder
3570 __ idivl(second_reg);
3571 } else {
3572 __ cmpq(second_reg, Immediate(-1));
3573 __ j(kEqual, slow_path->GetEntryLabel());
3574 // rdx:rax <- sign-extended of rax
3575 __ cqo();
3576 // rax = quotient, rdx = remainder
3577 __ idivq(second_reg);
3578 }
3579 __ Bind(slow_path->GetExitLabel());
3580 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003581}
3582
Calin Juravle7c4954d2014-10-28 16:57:40 +00003583void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3584 LocationSummary* locations =
3585 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3586 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003587 case Primitive::kPrimInt:
3588 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003589 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003590 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003591 locations->SetOut(Location::SameAsFirstInput());
3592 // Intel uses edx:eax (rdx:rax for 64-bit values) as the dividend.
3593 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003594 // We need to save the numerator while we tweak RAX and RDX. As imul forces its results
3595 // into RAX and RDX, things are simpler if the quotient is also built in RDX, so we
3596 // request another temp for the numerator.
3597 if (div->InputAt(1)->IsConstant()) {
3598 locations->AddTemp(Location::RequiresRegister());
3599 }
Calin Juravled0d48522014-11-04 16:40:20 +00003600 break;
3601 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003602
Calin Juravle7c4954d2014-10-28 16:57:40 +00003603 case Primitive::kPrimFloat:
3604 case Primitive::kPrimDouble: {
3605 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003606 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003607 locations->SetOut(Location::SameAsFirstInput());
3608 break;
3609 }
3610
3611 default:
3612 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3613 }
3614}
3615
3616void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3617 LocationSummary* locations = div->GetLocations();
3618 Location first = locations->InAt(0);
3619 Location second = locations->InAt(1);
3620 DCHECK(first.Equals(locations->Out()));
3621
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003622 Primitive::Type type = div->GetResultType();
3623 switch (type) {
3624 case Primitive::kPrimInt:
3625 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003626 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003627 break;
3628 }
3629
Calin Juravle7c4954d2014-10-28 16:57:40 +00003630 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003631 if (second.IsFpuRegister()) {
3632 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3633 } else if (second.IsConstant()) {
3634 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003635 codegen_->LiteralFloatAddress(
3636 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003637 } else {
3638 DCHECK(second.IsStackSlot());
3639 __ divss(first.AsFpuRegister<XmmRegister>(),
3640 Address(CpuRegister(RSP), second.GetStackIndex()));
3641 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003642 break;
3643 }
3644
3645 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003646 if (second.IsFpuRegister()) {
3647 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3648 } else if (second.IsConstant()) {
3649 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003650 codegen_->LiteralDoubleAddress(
3651 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003652 } else {
3653 DCHECK(second.IsDoubleStackSlot());
3654 __ divsd(first.AsFpuRegister<XmmRegister>(),
3655 Address(CpuRegister(RSP), second.GetStackIndex()));
3656 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003657 break;
3658 }
3659
3660 default:
3661 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3662 }
3663}
3664
Calin Juravlebacfec32014-11-14 15:54:36 +00003665void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003666 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003667 LocationSummary* locations =
3668 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003669
3670 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003671 case Primitive::kPrimInt:
3672 case Primitive::kPrimLong: {
3673 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003674 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003675 // Intel uses edx:eax (rdx:rax for 64-bit values) as the dividend and puts the remainder in edx/rdx.
3676 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003677 // We need to save the numerator while we tweak RAX and RDX. As imul forces its results
3678 // into RAX and RDX, things are simpler if the remainder is also built in RAX, so we
3679 // request another temp for the numerator.
3680 if (rem->InputAt(1)->IsConstant()) {
3681 locations->AddTemp(Location::RequiresRegister());
3682 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003683 break;
3684 }
3685
3686 case Primitive::kPrimFloat:
3687 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003688 locations->SetInAt(0, Location::Any());
3689 locations->SetInAt(1, Location::Any());
3690 locations->SetOut(Location::RequiresFpuRegister());
3691 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003692 break;
3693 }
3694
3695 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003696 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003697 }
3698}
3699
3700void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3701 Primitive::Type type = rem->GetResultType();
3702 switch (type) {
3703 case Primitive::kPrimInt:
3704 case Primitive::kPrimLong: {
3705 GenerateDivRemIntegral(rem);
3706 break;
3707 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003708 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003709 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003710 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003711 break;
3712 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003713 default:
3714 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3715 }
3716}
3717
Calin Juravled0d48522014-11-04 16:40:20 +00003718void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00003719 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
3720 ? LocationSummary::kCallOnSlowPath
3721 : LocationSummary::kNoCall;
3722 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Calin Juravled0d48522014-11-04 16:40:20 +00003723 locations->SetInAt(0, Location::Any());
3724 if (instruction->HasUses()) {
3725 locations->SetOut(Location::SameAsFirstInput());
3726 }
3727}
3728
3729void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003730 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003731 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3732 codegen_->AddSlowPath(slow_path);
3733
3734 LocationSummary* locations = instruction->GetLocations();
3735 Location value = locations->InAt(0);
3736
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003737 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003738 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003739 case Primitive::kPrimByte:
3740 case Primitive::kPrimChar:
3741 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003742 case Primitive::kPrimInt: {
3743 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003744 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003745 __ j(kEqual, slow_path->GetEntryLabel());
3746 } else if (value.IsStackSlot()) {
3747 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3748 __ j(kEqual, slow_path->GetEntryLabel());
3749 } else {
3750 DCHECK(value.IsConstant()) << value;
3751 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3752 __ jmp(slow_path->GetEntryLabel());
3753 }
3754 }
3755 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003756 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003757 case Primitive::kPrimLong: {
3758 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003759 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003760 __ j(kEqual, slow_path->GetEntryLabel());
3761 } else if (value.IsDoubleStackSlot()) {
3762 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3763 __ j(kEqual, slow_path->GetEntryLabel());
3764 } else {
3765 DCHECK(value.IsConstant()) << value;
3766 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3767 __ jmp(slow_path->GetEntryLabel());
3768 }
3769 }
3770 break;
3771 }
3772 default:
3773 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003774 }
Calin Juravled0d48522014-11-04 16:40:20 +00003775}
3776
Calin Juravle9aec02f2014-11-18 23:06:35 +00003777void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3778 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3779
3780 LocationSummary* locations =
3781 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3782
3783 switch (op->GetResultType()) {
3784 case Primitive::kPrimInt:
3785 case Primitive::kPrimLong: {
3786 locations->SetInAt(0, Location::RequiresRegister());
3787 // The shift count needs to be in CL.
3788 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3789 locations->SetOut(Location::SameAsFirstInput());
3790 break;
3791 }
3792 default:
3793 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3794 }
3795}
3796
3797void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3798 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3799
3800 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003801 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003802 Location second = locations->InAt(1);
3803
3804 switch (op->GetResultType()) {
3805 case Primitive::kPrimInt: {
3806 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003807 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003808 if (op->IsShl()) {
3809 __ shll(first_reg, second_reg);
3810 } else if (op->IsShr()) {
3811 __ sarl(first_reg, second_reg);
3812 } else {
3813 __ shrl(first_reg, second_reg);
3814 }
3815 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003816 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003817 if (op->IsShl()) {
3818 __ shll(first_reg, imm);
3819 } else if (op->IsShr()) {
3820 __ sarl(first_reg, imm);
3821 } else {
3822 __ shrl(first_reg, imm);
3823 }
3824 }
3825 break;
3826 }
3827 case Primitive::kPrimLong: {
3828 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003829 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003830 if (op->IsShl()) {
3831 __ shlq(first_reg, second_reg);
3832 } else if (op->IsShr()) {
3833 __ sarq(first_reg, second_reg);
3834 } else {
3835 __ shrq(first_reg, second_reg);
3836 }
3837 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003838 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003839 if (op->IsShl()) {
3840 __ shlq(first_reg, imm);
3841 } else if (op->IsShr()) {
3842 __ sarq(first_reg, imm);
3843 } else {
3844 __ shrq(first_reg, imm);
3845 }
3846 }
3847 break;
3848 }
3849 default:
3850 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003851 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003852 }
3853}
3854
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003855void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3856 LocationSummary* locations =
3857 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3858
3859 switch (ror->GetResultType()) {
3860 case Primitive::kPrimInt:
3861 case Primitive::kPrimLong: {
3862 locations->SetInAt(0, Location::RequiresRegister());
3863 // The shift count needs to be in CL (unless it is a constant).
3864 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3865 locations->SetOut(Location::SameAsFirstInput());
3866 break;
3867 }
3868 default:
3869 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3870 UNREACHABLE();
3871 }
3872}
3873
3874void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3875 LocationSummary* locations = ror->GetLocations();
3876 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3877 Location second = locations->InAt(1);
3878
3879 switch (ror->GetResultType()) {
3880 case Primitive::kPrimInt:
3881 if (second.IsRegister()) {
3882 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3883 __ rorl(first_reg, second_reg);
3884 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003885 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003886 __ rorl(first_reg, imm);
3887 }
3888 break;
3889 case Primitive::kPrimLong:
3890 if (second.IsRegister()) {
3891 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3892 __ rorq(first_reg, second_reg);
3893 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003894 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003895 __ rorq(first_reg, imm);
3896 }
3897 break;
3898 default:
3899 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3900 UNREACHABLE();
3901 }
3902}
3903
Calin Juravle9aec02f2014-11-18 23:06:35 +00003904void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3905 HandleShift(shl);
3906}
3907
3908void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3909 HandleShift(shl);
3910}
3911
3912void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3913 HandleShift(shr);
3914}
3915
3916void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3917 HandleShift(shr);
3918}
3919
3920void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3921 HandleShift(ushr);
3922}
3923
3924void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3925 HandleShift(ushr);
3926}
3927
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003928void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003929 LocationSummary* locations =
3930 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003931 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003932 if (instruction->IsStringAlloc()) {
3933 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3934 } else {
3935 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3936 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3937 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003938 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003939}
3940
3941void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003942 // Note: if heap poisoning is enabled, the entry point takes care
3943 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003944 if (instruction->IsStringAlloc()) {
3945 // String is allocated through StringFactory. Call NewEmptyString entry point.
3946 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
3947 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64WordSize);
3948 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3949 __ call(Address(temp, code_offset.SizeValue()));
3950 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3951 } else {
3952 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3953 instruction,
3954 instruction->GetDexPc(),
3955 nullptr);
3956 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3957 DCHECK(!codegen_->IsLeafMethod());
3958 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003959}
3960
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003961void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3962 LocationSummary* locations =
3963 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3964 InvokeRuntimeCallingConvention calling_convention;
3965 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003966 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003967 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003968 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003969}
3970
3971void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3972 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003973 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3974 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003975 // Note: if heap poisoning is enabled, the entry point takes care
3976 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01003977 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
3978 instruction,
3979 instruction->GetDexPc(),
3980 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003981 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003982
3983 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003984}
3985
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003986void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003987 LocationSummary* locations =
3988 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003989 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3990 if (location.IsStackSlot()) {
3991 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3992 } else if (location.IsDoubleStackSlot()) {
3993 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3994 }
3995 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003996}
3997
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003998void InstructionCodeGeneratorX86_64::VisitParameterValue(
3999 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004000 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004001}
4002
4003void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4004 LocationSummary* locations =
4005 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4006 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4007}
4008
4009void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4010 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4011 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004012}
4013
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004014void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4015 LocationSummary* locations =
4016 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4017 locations->SetInAt(0, Location::RequiresRegister());
4018 locations->SetOut(Location::RequiresRegister());
4019}
4020
4021void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4022 LocationSummary* locations = instruction->GetLocations();
4023 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00004024 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004025 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4026 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
4027 } else {
Nicolas Geoffray88f288e2016-06-29 08:17:52 +00004028 method_offset = mirror::Class::EmbeddedImTableEntryOffset(
4029 instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value();
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004030 }
4031 __ movq(locations->Out().AsRegister<CpuRegister>(),
4032 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
4033}
4034
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004035void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004036 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004037 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004038 locations->SetInAt(0, Location::RequiresRegister());
4039 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004040}
4041
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004042void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4043 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004044 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4045 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004046 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004047 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004048 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004049 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004050 break;
4051
4052 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004053 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004054 break;
4055
4056 default:
4057 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4058 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004059}
4060
David Brazdil66d126e2015-04-03 16:02:44 +01004061void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4062 LocationSummary* locations =
4063 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4064 locations->SetInAt(0, Location::RequiresRegister());
4065 locations->SetOut(Location::SameAsFirstInput());
4066}
4067
4068void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004069 LocationSummary* locations = bool_not->GetLocations();
4070 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4071 locations->Out().AsRegister<CpuRegister>().AsRegister());
4072 Location out = locations->Out();
4073 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4074}
4075
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004076void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004077 LocationSummary* locations =
4078 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004079 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004080 locations->SetInAt(i, Location::Any());
4081 }
4082 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004083}
4084
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004085void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004086 LOG(FATAL) << "Unimplemented";
4087}
4088
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004089void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004090 /*
4091 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004092 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004093 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4094 */
4095 switch (kind) {
4096 case MemBarrierKind::kAnyAny: {
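// The only barrier that needs a real fence on x86-64 (StoreLoad); MemoryFence()
// typically lowers to an mfence or an equivalent locked instruction.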
Mark P Mendell17077d82015-12-16 19:15:59 +00004097 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004098 break;
4099 }
4100 case MemBarrierKind::kAnyStore:
4101 case MemBarrierKind::kLoadAny:
4102 case MemBarrierKind::kStoreStore: {
4103 // nop
4104 break;
4105 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004106 case MemBarrierKind::kNTStoreStore:
4107 // Non-Temporal Store/Store needs an explicit fence.
4108 MemoryFence(/* non-temporal */ true);
4109 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004110 }
4111}
4112
4113void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4114 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4115
Roland Levillain0d5a2812015-11-13 10:07:31 +00004116 bool object_field_get_with_read_barrier =
4117 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004118 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004119 new (GetGraph()->GetArena()) LocationSummary(instruction,
4120 object_field_get_with_read_barrier ?
4121 LocationSummary::kCallOnSlowPath :
4122 LocationSummary::kNoCall);
Calin Juravle52c48962014-12-16 17:02:57 +00004123 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004124 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4125 locations->SetOut(Location::RequiresFpuRegister());
4126 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004127 // The output overlaps for an object field get when read barriers
4128 // are enabled: we do not want the move to overwrite the object's
4129 // location, as we need it to emit the read barrier.
4130 locations->SetOut(
4131 Location::RequiresRegister(),
4132 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004133 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004134 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4135 // We need a temporary register for the read barrier marking slow
4136 // path in CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier.
4137 locations->AddTemp(Location::RequiresRegister());
4138 }
Calin Juravle52c48962014-12-16 17:02:57 +00004139}
4140
4141void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4142 const FieldInfo& field_info) {
4143 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4144
4145 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004146 Location base_loc = locations->InAt(0);
4147 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004148 Location out = locations->Out();
4149 bool is_volatile = field_info.IsVolatile();
4150 Primitive::Type field_type = field_info.GetFieldType();
4151 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4152
4153 switch (field_type) {
4154 case Primitive::kPrimBoolean: {
4155 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4156 break;
4157 }
4158
4159 case Primitive::kPrimByte: {
4160 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4161 break;
4162 }
4163
4164 case Primitive::kPrimShort: {
4165 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4166 break;
4167 }
4168
4169 case Primitive::kPrimChar: {
4170 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4171 break;
4172 }
4173
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004174 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004175 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4176 break;
4177 }
4178
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004179 case Primitive::kPrimNot: {
4180 // /* HeapReference<Object> */ out = *(base + offset)
4181 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4182 Location temp_loc = locations->GetTemp(0);
4183 // Note that a potential implicit null check is handled in this
4184 // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
4185 codegen_->GenerateFieldLoadWithBakerReadBarrier(
4186 instruction, out, base, offset, temp_loc, /* needs_null_check */ true);
4187 if (is_volatile) {
4188 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4189 }
4190 } else {
4191 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4192 codegen_->MaybeRecordImplicitNullCheck(instruction);
4193 if (is_volatile) {
4194 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4195 }
4196 // If read barriers are enabled, emit read barriers other than
4197 // Baker's using a slow path (and also unpoison the loaded
4198 // reference, if heap poisoning is enabled).
4199 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4200 }
4201 break;
4202 }
4203
Calin Juravle52c48962014-12-16 17:02:57 +00004204 case Primitive::kPrimLong: {
4205 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4206 break;
4207 }
4208
4209 case Primitive::kPrimFloat: {
4210 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4211 break;
4212 }
4213
4214 case Primitive::kPrimDouble: {
4215 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4216 break;
4217 }
4218
4219 case Primitive::kPrimVoid:
4220 LOG(FATAL) << "Unreachable type " << field_type;
4221 UNREACHABLE();
4222 }
4223
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004224 if (field_type == Primitive::kPrimNot) {
4225 // Potential implicit null checks, in the case of reference
4226 // fields, are handled in the previous switch statement.
4227 } else {
4228 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004229 }
Roland Levillain4d027112015-07-01 15:41:14 +01004230
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004231 if (is_volatile) {
4232 if (field_type == Primitive::kPrimNot) {
4233 // Memory barriers, in the case of references, are also handled
4234 // in the previous switch statement.
4235 } else {
4236 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4237 }
Roland Levillain4d027112015-07-01 15:41:14 +01004238 }
Calin Juravle52c48962014-12-16 17:02:57 +00004239}
4240
4241void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4242 const FieldInfo& field_info) {
4243 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4244
4245 LocationSummary* locations =
4246 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004247 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004248 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004249 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004250 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004251
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004252 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004253 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004254 if (is_volatile) {
4255 // In order to satisfy the semantics of volatile, this must be a single instruction store.
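// (A 64-bit constant that does not fit in 32 bits would otherwise be stored with
// two 32-bit moves, which is not atomic.)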
4256 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4257 } else {
4258 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4259 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004260 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004261 if (is_volatile) {
4262 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4263 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4264 } else {
4265 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4266 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004267 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004268 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004269 // Temporary registers for the write barrier.
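// (Typically one register to hold the card table base loaded from the Thread and one
// to compute the object's card address; see MarkGCCard.)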
Roland Levillain4d027112015-07-01 15:41:14 +01004270 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004271 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004272 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4273 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004274 locations->AddTemp(Location::RequiresRegister());
4275 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004276}
4277
Calin Juravle52c48962014-12-16 17:02:57 +00004278void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004279 const FieldInfo& field_info,
4280 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004281 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4282
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004283 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004284 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4285 Location value = locations->InAt(1);
4286 bool is_volatile = field_info.IsVolatile();
4287 Primitive::Type field_type = field_info.GetFieldType();
4288 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4289
4290 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004291 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004292 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004293
Mark Mendellea5af682015-10-22 17:35:49 -04004294 bool maybe_record_implicit_null_check_done = false;
4295
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004296 switch (field_type) {
4297 case Primitive::kPrimBoolean:
4298 case Primitive::kPrimByte: {
Mark Mendell40741f32015-04-20 22:10:34 -04004299 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004300 int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004301 __ movb(Address(base, offset), Immediate(v));
4302 } else {
4303 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4304 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004305 break;
4306 }
4307
4308 case Primitive::kPrimShort:
4309 case Primitive::kPrimChar: {
Mark Mendell40741f32015-04-20 22:10:34 -04004310 if (value.IsConstant()) {
Mark Mendellea5af682015-10-22 17:35:49 -04004311 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Mark Mendell40741f32015-04-20 22:10:34 -04004312 __ movw(Address(base, offset), Immediate(v));
4313 } else {
4314 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4315 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004316 break;
4317 }
4318
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004319 case Primitive::kPrimInt:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004320 case Primitive::kPrimNot: {
Mark Mendell40741f32015-04-20 22:10:34 -04004321 if (value.IsConstant()) {
4322 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Roland Levillain4d027112015-07-01 15:41:14 +01004323 // `field_type == Primitive::kPrimNot` implies `v == 0`.
4324 DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
4325 // Note: if heap poisoning is enabled, no need to poison
4326 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004327 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004328 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01004329 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4330 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4331 __ movl(temp, value.AsRegister<CpuRegister>());
4332 __ PoisonHeapReference(temp);
4333 __ movl(Address(base, offset), temp);
4334 } else {
4335 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4336 }
Mark Mendell40741f32015-04-20 22:10:34 -04004337 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004338 break;
4339 }
4340
4341 case Primitive::kPrimLong: {
Mark Mendell40741f32015-04-20 22:10:34 -04004342 if (value.IsConstant()) {
4343 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004344 codegen_->MoveInt64ToAddress(Address(base, offset),
4345 Address(base, offset + sizeof(int32_t)),
4346 v,
4347 instruction);
4348 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004349 } else {
4350 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4351 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004352 break;
4353 }
4354
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004355 case Primitive::kPrimFloat: {
Mark Mendellea5af682015-10-22 17:35:49 -04004356 if (value.IsConstant()) {
4357 int32_t v =
4358 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4359 __ movl(Address(base, offset), Immediate(v));
4360 } else {
4361 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4362 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004363 break;
4364 }
4365
4366 case Primitive::kPrimDouble: {
Mark Mendellea5af682015-10-22 17:35:49 -04004367 if (value.IsConstant()) {
4368 int64_t v =
4369 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4370 codegen_->MoveInt64ToAddress(Address(base, offset),
4371 Address(base, offset + sizeof(int32_t)),
4372 v,
4373 instruction);
4374 maybe_record_implicit_null_check_done = true;
4375 } else {
4376 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4377 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004378 break;
4379 }
4380
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004381 case Primitive::kPrimVoid:
4382 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004383 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004384 }
Calin Juravle52c48962014-12-16 17:02:57 +00004385
Mark Mendellea5af682015-10-22 17:35:49 -04004386 if (!maybe_record_implicit_null_check_done) {
4387 codegen_->MaybeRecordImplicitNullCheck(instruction);
4388 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004389
4390 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4391 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4392 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004393 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004394 }
4395
Calin Juravle52c48962014-12-16 17:02:57 +00004396 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004397 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004398 }
4399}
4400
4401void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4402 HandleFieldSet(instruction, instruction->GetFieldInfo());
4403}
4404
4405void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004406 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004407}
4408
4409void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004410 HandleFieldGet(instruction);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004411}
4412
4413void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004414 HandleFieldGet(instruction, instruction->GetFieldInfo());
4415}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004416
Calin Juravle52c48962014-12-16 17:02:57 +00004417void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4418 HandleFieldGet(instruction);
4419}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004420
Calin Juravle52c48962014-12-16 17:02:57 +00004421void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4422 HandleFieldGet(instruction, instruction->GetFieldInfo());
4423}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004424
Calin Juravle52c48962014-12-16 17:02:57 +00004425void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4426 HandleFieldSet(instruction, instruction->GetFieldInfo());
4427}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004428
Calin Juravle52c48962014-12-16 17:02:57 +00004429void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004430 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004431}
4432
Calin Juravlee460d1d2015-09-29 04:52:17 +01004433void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4434 HUnresolvedInstanceFieldGet* instruction) {
4435 FieldAccessCallingConventionX86_64 calling_convention;
4436 codegen_->CreateUnresolvedFieldLocationSummary(
4437 instruction, instruction->GetFieldType(), calling_convention);
4438}
4439
4440void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4441 HUnresolvedInstanceFieldGet* instruction) {
4442 FieldAccessCallingConventionX86_64 calling_convention;
4443 codegen_->GenerateUnresolvedFieldAccess(instruction,
4444 instruction->GetFieldType(),
4445 instruction->GetFieldIndex(),
4446 instruction->GetDexPc(),
4447 calling_convention);
4448}
4449
4450void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4451 HUnresolvedInstanceFieldSet* instruction) {
4452 FieldAccessCallingConventionX86_64 calling_convention;
4453 codegen_->CreateUnresolvedFieldLocationSummary(
4454 instruction, instruction->GetFieldType(), calling_convention);
4455}
4456
4457void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4458 HUnresolvedInstanceFieldSet* instruction) {
4459 FieldAccessCallingConventionX86_64 calling_convention;
4460 codegen_->GenerateUnresolvedFieldAccess(instruction,
4461 instruction->GetFieldType(),
4462 instruction->GetFieldIndex(),
4463 instruction->GetDexPc(),
4464 calling_convention);
4465}
4466
4467void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4468 HUnresolvedStaticFieldGet* instruction) {
4469 FieldAccessCallingConventionX86_64 calling_convention;
4470 codegen_->CreateUnresolvedFieldLocationSummary(
4471 instruction, instruction->GetFieldType(), calling_convention);
4472}
4473
4474void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4475 HUnresolvedStaticFieldGet* instruction) {
4476 FieldAccessCallingConventionX86_64 calling_convention;
4477 codegen_->GenerateUnresolvedFieldAccess(instruction,
4478 instruction->GetFieldType(),
4479 instruction->GetFieldIndex(),
4480 instruction->GetDexPc(),
4481 calling_convention);
4482}
4483
4484void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4485 HUnresolvedStaticFieldSet* instruction) {
4486 FieldAccessCallingConventionX86_64 calling_convention;
4487 codegen_->CreateUnresolvedFieldLocationSummary(
4488 instruction, instruction->GetFieldType(), calling_convention);
4489}
4490
4491void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4492 HUnresolvedStaticFieldSet* instruction) {
4493 FieldAccessCallingConventionX86_64 calling_convention;
4494 codegen_->GenerateUnresolvedFieldAccess(instruction,
4495 instruction->GetFieldType(),
4496 instruction->GetFieldIndex(),
4497 instruction->GetDexPc(),
4498 calling_convention);
4499}
4500
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004501void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004502 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4503 ? LocationSummary::kCallOnSlowPath
4504 : LocationSummary::kNoCall;
4505 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
4506 Location loc = codegen_->IsImplicitNullCheckAllowed(instruction)
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004507 ? Location::RequiresRegister()
4508 : Location::Any();
4509 locations->SetInAt(0, loc);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004510 if (instruction->HasUses()) {
4511 locations->SetOut(Location::SameAsFirstInput());
4512 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004513}
4514
Calin Juravle2ae48182016-03-16 14:05:09 +00004515void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4516 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004517 return;
4518 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004519 LocationSummary* locations = instruction->GetLocations();
4520 Location obj = locations->InAt(0);
4521
4522 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
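  // The load from [obj + 0] faults if obj is null; the runtime's fault
  // handler uses the PC recorded below to raise the NullPointerException.
  // testl only sets flags, so RAX is merely a convenient register operand
  // and is left unmodified.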
Calin Juravle2ae48182016-03-16 14:05:09 +00004523 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004524}
4525
Calin Juravle2ae48182016-03-16 14:05:09 +00004526void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004527 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004528 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004529
4530 LocationSummary* locations = instruction->GetLocations();
4531 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004532
4533 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004534 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004535 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004536 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004537 } else {
4538 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004539 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004540 __ jmp(slow_path->GetEntryLabel());
4541 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004542 }
4543 __ j(kEqual, slow_path->GetEntryLabel());
4544}
4545
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004546void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004547 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004548}
4549
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004550void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004551 bool object_array_get_with_read_barrier =
4552 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004553 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004554 new (GetGraph()->GetArena()) LocationSummary(instruction,
4555 object_array_get_with_read_barrier ?
4556 LocationSummary::kCallOnSlowPath :
4557 LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004558 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004559 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004560 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4561 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4562 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004563 // The output overlaps for an object array get when read barriers
4564 // are enabled: we do not want the move to overwrite the array's
4565 // location, as we need it to emit the read barrier.
4566 locations->SetOut(
4567 Location::RequiresRegister(),
4568 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004569 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004570 // We need a temporary register for the read barrier marking slow
4571 // path in CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier.
4572 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
4573 locations->AddTemp(Location::RequiresRegister());
4574 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004575}
4576
4577void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4578 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004579 Location obj_loc = locations->InAt(0);
4580 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004581 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004582 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004583 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004584
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004585 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004586 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004587 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004588 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004589 if (index.IsConstant()) {
4590 __ movzxb(out, Address(obj,
4591 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4592 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004593 __ movzxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004594 }
4595 break;
4596 }
4597
4598 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004599 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004600 if (index.IsConstant()) {
4601 __ movsxb(out, Address(obj,
4602 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
4603 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004604 __ movsxb(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004605 }
4606 break;
4607 }
4608
4609 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004610 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004611 if (index.IsConstant()) {
4612 __ movsxw(out, Address(obj,
4613 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4614 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004615 __ movsxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004616 }
4617 break;
4618 }
4619
4620 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004621 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004622 if (index.IsConstant()) {
4623 __ movzxw(out, Address(obj,
4624 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
4625 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004626 __ movzxw(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004627 }
4628 break;
4629 }
4630
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004631 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004632 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004633 if (index.IsConstant()) {
4634 __ movl(out, Address(obj,
4635 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4636 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004637 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004638 }
4639 break;
4640 }
4641
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004642 case Primitive::kPrimNot: {
4643 static_assert(
4644 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4645 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004646 // /* HeapReference<Object> */ out =
4647 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4648 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
4649 Location temp = locations->GetTemp(0);
4650 // Note that a potential implicit null check is handled in this
4651 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
4652 codegen_->GenerateArrayLoadWithBakerReadBarrier(
4653 instruction, out_loc, obj, data_offset, index, temp, /* needs_null_check */ true);
4654 } else {
4655 CpuRegister out = out_loc.AsRegister<CpuRegister>();
4656 if (index.IsConstant()) {
4657 uint32_t offset =
4658 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
4659 __ movl(out, Address(obj, offset));
4660 codegen_->MaybeRecordImplicitNullCheck(instruction);
4661 // If read barriers are enabled, emit read barriers other than
4662 // Baker's using a slow path (and also unpoison the loaded
4663 // reference, if heap poisoning is enabled).
4664 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4665 } else {
4666 __ movl(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
4667 codegen_->MaybeRecordImplicitNullCheck(instruction);
4668 // If read barriers are enabled, emit read barriers other than
4669 // Baker's using a slow path (and also unpoison the loaded
4670 // reference, if heap poisoning is enabled).
4671 codegen_->MaybeGenerateReadBarrierSlow(
4672 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4673 }
4674 }
4675 break;
4676 }
4677
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004678 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004679 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004680 if (index.IsConstant()) {
4681 __ movq(out, Address(obj,
4682 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4683 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004684 __ movq(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004685 }
4686 break;
4687 }
4688
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004689 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004690 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004691 if (index.IsConstant()) {
4692 __ movss(out, Address(obj,
4693 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
4694 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004695 __ movss(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004696 }
4697 break;
4698 }
4699
4700 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004701 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004702 if (index.IsConstant()) {
4703 __ movsd(out, Address(obj,
4704 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
4705 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004706 __ movsd(out, Address(obj, index.AsRegister<CpuRegister>(), TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004707 }
4708 break;
4709 }
4710
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004711 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004712 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004713 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004714 }
Roland Levillain4d027112015-07-01 15:41:14 +01004715
4716 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004717 // Potential implicit null checks, in the case of reference
4718 // arrays, are handled in the previous switch statement.
4719 } else {
4720 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004721 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004722}
4723
4724void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004725 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004726
4727 bool needs_write_barrier =
4728 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004729 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004730 bool object_array_set_with_read_barrier =
4731 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004732
Nicolas Geoffray39468442014-09-02 15:17:15 +01004733 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004734 instruction,
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004735 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004736 LocationSummary::kCallOnSlowPath :
4737 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004738
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004739 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004740 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4741 if (Primitive::IsFloatingPointType(value_type)) {
4742 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004743 } else {
4744 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4745 }
4746
4747 if (needs_write_barrier) {
4748 // Temporary registers for the write barrier.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004749
4750 // This first temporary register is possibly used for heap
4751 // reference poisoning and/or read barrier emission too.
4752 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004753 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004754 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004755}
4756
4757void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4758 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004759 Location array_loc = locations->InAt(0);
4760 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004761 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004762 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004763 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004764 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004765 bool needs_write_barrier =
4766 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004767 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4768 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4769 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004770
4771 switch (value_type) {
4772 case Primitive::kPrimBoolean:
4773 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004774 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
4775 Address address = index.IsConstant()
4776 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + offset)
4777 : Address(array, index.AsRegister<CpuRegister>(), TIMES_1, offset);
4778 if (value.IsRegister()) {
4779 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004780 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004781 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004782 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004783 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004784 break;
4785 }
4786
4787 case Primitive::kPrimShort:
4788 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004789 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
4790 Address address = index.IsConstant()
4791 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + offset)
4792 : Address(array, index.AsRegister<CpuRegister>(), TIMES_2, offset);
4793 if (value.IsRegister()) {
4794 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004795 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004796 DCHECK(value.IsConstant()) << value;
4797 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004798 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004799 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004800 break;
4801 }
4802
4803 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004804 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4805 Address address = index.IsConstant()
4806 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4807 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004808
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004809 if (!value.IsRegister()) {
4810 // Just setting null.
4811 DCHECK(instruction->InputAt(2)->IsNullConstant());
4812 DCHECK(value.IsConstant()) << value;
4813 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004814 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004815 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004816 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004817 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004818 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004819
4820 DCHECK(needs_write_barrier);
4821 CpuRegister register_value = value.AsRegister<CpuRegister>();
4822 NearLabel done, not_null, do_put;
4823 SlowPathCode* slow_path = nullptr;
4824 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004825 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004826 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4827 codegen_->AddSlowPath(slow_path);
4828 if (instruction->GetValueCanBeNull()) {
4829 __ testl(register_value, register_value);
4830 __ j(kNotEqual, &not_null);
4831 __ movl(address, Immediate(0));
4832 codegen_->MaybeRecordImplicitNullCheck(instruction);
4833 __ jmp(&done);
4834 __ Bind(&not_null);
4835 }
4836
Roland Levillain0d5a2812015-11-13 10:07:31 +00004837 if (kEmitCompilerReadBarrier) {
4838 // When read barriers are enabled, the type checking
4839 // instrumentation requires two read barriers:
4840 //
4841 // __ movl(temp2, temp);
4842 // // /* HeapReference<Class> */ temp = temp->component_type_
4843 // __ movl(temp, Address(temp, component_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004844 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004845 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
4846 //
4847 // // /* HeapReference<Class> */ temp2 = register_value->klass_
4848 // __ movl(temp2, Address(register_value, class_offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004849 // codegen_->GenerateReadBarrierSlow(
Roland Levillain0d5a2812015-11-13 10:07:31 +00004850 // instruction, temp2_loc, temp2_loc, value, class_offset, temp_loc);
4851 //
4852 // __ cmpl(temp, temp2);
4853 //
4854 // However, the second read barrier may trash `temp`, as it
4855 // is a temporary register, and as such would not be saved
4856 // along with live registers before calling the runtime (nor
4857 // restored afterwards). So in this case, we bail out and
4858 // delegate the work to the array set slow path.
4859 //
4860 // TODO: Extend the register allocator to support a new
4861 // "(locally) live temp" location so as to avoid always
4862 // going into the slow path when read barriers are enabled.
4863 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004864 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004865 // /* HeapReference<Class> */ temp = array->klass_
4866 __ movl(temp, Address(array, class_offset));
4867 codegen_->MaybeRecordImplicitNullCheck(instruction);
4868 __ MaybeUnpoisonHeapReference(temp);
4869
4870 // /* HeapReference<Class> */ temp = temp->component_type_
4871 __ movl(temp, Address(temp, component_offset));
4872 // If heap poisoning is enabled, no need to unpoison `temp`
4873 // nor the object reference in `register_value->klass`, as
4874 // we are comparing two poisoned references.
4875 __ cmpl(temp, Address(register_value, class_offset));
4876
4877 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4878 __ j(kEqual, &do_put);
4879 // If heap poisoning is enabled, the `temp` reference has
4880 // not been unpoisoned yet; unpoison it now.
4881 __ MaybeUnpoisonHeapReference(temp);
4882
4883 // /* HeapReference<Class> */ temp = temp->super_class_
4884 __ movl(temp, Address(temp, super_offset));
4885 // If heap poisoning is enabled, no need to unpoison
4886 // `temp`, as we are comparing against null below.
4887 __ testl(temp, temp);
4888 __ j(kNotEqual, slow_path->GetEntryLabel());
4889 __ Bind(&do_put);
4890 } else {
4891 __ j(kNotEqual, slow_path->GetEntryLabel());
4892 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004893 }
4894 }
4895
4896 if (kPoisonHeapReferences) {
4897 __ movl(temp, register_value);
4898 __ PoisonHeapReference(temp);
4899 __ movl(address, temp);
4900 } else {
4901 __ movl(address, register_value);
4902 }
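      // With heap poisoning, references are stored in the heap in an
      // encoded (poisoned) form, in practice the negated value, so the
      // reference is poisoned into a temp before the store and unpoisoned
      // again on loads.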
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004903 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004904 codegen_->MaybeRecordImplicitNullCheck(instruction);
4905 }
4906
4907 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4908 codegen_->MarkGCCard(
4909 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4910 __ Bind(&done);
4911
4912 if (slow_path != nullptr) {
4913 __ Bind(slow_path->GetExitLabel());
4914 }
4915
4916 break;
4917 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004918
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004919 case Primitive::kPrimInt: {
4920 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
4921 Address address = index.IsConstant()
4922 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4923 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
4924 if (value.IsRegister()) {
4925 __ movl(address, value.AsRegister<CpuRegister>());
4926 } else {
4927 DCHECK(value.IsConstant()) << value;
4928 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4929 __ movl(address, Immediate(v));
4930 }
4931 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004932 break;
4933 }
4934
4935 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004936 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
4937 Address address = index.IsConstant()
4938 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4939 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
4940 if (value.IsRegister()) {
4941 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004942 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004943 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004944 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004945 Address address_high = index.IsConstant()
4946 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4947 offset + sizeof(int32_t))
4948 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4949 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004950 }
4951 break;
4952 }
4953
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004954 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004955 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
4956 Address address = index.IsConstant()
4957 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + offset)
4958 : Address(array, index.AsRegister<CpuRegister>(), TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004959 if (value.IsFpuRegister()) {
4960 __ movss(address, value.AsFpuRegister<XmmRegister>());
4961 } else {
4962 DCHECK(value.IsConstant());
4963 int32_t v =
4964 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4965 __ movl(address, Immediate(v));
4966 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004967 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004968 break;
4969 }
4970
4971 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004972 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
4973 Address address = index.IsConstant()
4974 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + offset)
4975 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004976 if (value.IsFpuRegister()) {
4977 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4978 codegen_->MaybeRecordImplicitNullCheck(instruction);
4979 } else {
4980 int64_t v =
4981 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4982 Address address_high = index.IsConstant()
4983 ? Address(array, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) +
4984 offset + sizeof(int32_t))
4985 : Address(array, index.AsRegister<CpuRegister>(), TIMES_8, offset + sizeof(int32_t));
4986 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4987 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004988 break;
4989 }
4990
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004991 case Primitive::kPrimVoid:
4992 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004993 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004994 }
4995}
4996
4997void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004998 LocationSummary* locations =
4999 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005000 locations->SetInAt(0, Location::RequiresRegister());
5001 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005002}
5003
5004void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
5005 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005006 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005007 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5008 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005009 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005010 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005011}
5012
5013void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00005014 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
5015 ? LocationSummary::kCallOnSlowPath
5016 : LocationSummary::kNoCall;
5017 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005018 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendell99dbd682015-04-22 16:18:52 -04005019 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005020 if (instruction->HasUses()) {
5021 locations->SetOut(Location::SameAsFirstInput());
5022 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005023}
5024
5025void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5026 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005027 Location index_loc = locations->InAt(0);
5028 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005029 SlowPathCode* slow_path =
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005030 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005031
Mark Mendell99dbd682015-04-22 16:18:52 -04005032 if (length_loc.IsConstant()) {
5033 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5034 if (index_loc.IsConstant()) {
5035 // BCE will remove the bounds check if we are guaranteed to pass.
5036 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5037 if (index < 0 || index >= length) {
5038 codegen_->AddSlowPath(slow_path);
5039 __ jmp(slow_path->GetEntryLabel());
5040 } else {
5041 // Some optimization after BCE may have generated this, and we should not
5042 // generate a bounds check if it is a valid range.
5043 }
5044 return;
5045 }
5046
5047 // We have to reverse the jump condition because the length is the constant.
5048 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5049 __ cmpl(index_reg, Immediate(length));
5050 codegen_->AddSlowPath(slow_path);
5051 __ j(kAboveEqual, slow_path->GetEntryLabel());
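    // A single unsigned comparison covers both failure modes: a negative
    // index, viewed as unsigned, is larger than any valid length, so
    // kAboveEqual also catches index < 0.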
Mark Mendellf60c90b2015-03-04 15:12:59 -05005052 } else {
Mark Mendell99dbd682015-04-22 16:18:52 -04005053 CpuRegister length = length_loc.AsRegister<CpuRegister>();
5054 if (index_loc.IsConstant()) {
5055 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5056 __ cmpl(length, Immediate(value));
5057 } else {
5058 __ cmpl(length, index_loc.AsRegister<CpuRegister>());
5059 }
5060 codegen_->AddSlowPath(slow_path);
5061 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005062 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005063}
5064
5065void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5066 CpuRegister card,
5067 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005068 CpuRegister value,
5069 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005070 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005071 if (value_can_be_null) {
5072 __ testl(value, value);
5073 __ j(kEqual, &is_null);
5074 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005075 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64WordSize>().Int32Value(),
5076 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005077 __ movq(temp, object);
5078 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005079 __ movb(Address(temp, card, TIMES_1, 0), card);
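  // Sketch of the card mark above: `temp` is the object address divided by
  // the card size, used as an index off the card-table base held in `card`;
  // that base address is arranged so its low byte equals the dirty-card
  // value, so storing `card`'s low byte marks the card without loading a
  // separate constant.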
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005080 if (value_can_be_null) {
5081 __ Bind(&is_null);
5082 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005083}
5084
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005085void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005086 LOG(FATAL) << "Unimplemented";
5087}
5088
5089void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005090 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5091}
5092
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005093void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
5094 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
5095}
5096
5097void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005098 HBasicBlock* block = instruction->GetBlock();
5099 if (block->GetLoopInformation() != nullptr) {
5100 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5101 // The back edge will generate the suspend check.
5102 return;
5103 }
5104 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5105 // The goto will generate the suspend check.
5106 return;
5107 }
5108 GenerateSuspendCheck(instruction, nullptr);
5109}
5110
5111void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5112 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005113 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005114 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5115 if (slow_path == nullptr) {
5116 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5117 instruction->SetSlowPath(slow_path);
5118 codegen_->AddSlowPath(slow_path);
5119 if (successor != nullptr) {
5120 DCHECK(successor->IsLoopHeader());
5121 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5122 }
5123 } else {
5124 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5125 }
5126
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005127 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(),
5128 /* no_rip */ true),
5129 Immediate(0));
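  // The 16-bit thread flags live in the Thread object reached through the
  // gs segment; a nonzero value (e.g. a suspend or checkpoint request)
  // routes execution to the suspend-check slow path below.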
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005130 if (successor == nullptr) {
5131 __ j(kNotEqual, slow_path->GetEntryLabel());
5132 __ Bind(slow_path->GetReturnLabel());
5133 } else {
5134 __ j(kEqual, codegen_->GetLabelOf(successor));
5135 __ jmp(slow_path->GetEntryLabel());
5136 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005137}
5138
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005139X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5140 return codegen_->GetAssembler();
5141}
5142
5143void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005144 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005145 Location source = move->GetSource();
5146 Location destination = move->GetDestination();
5147
5148 if (source.IsRegister()) {
5149 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005150 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005151 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005152 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005153 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005154 } else {
5155 DCHECK(destination.IsDoubleStackSlot());
5156 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005157 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005158 }
5159 } else if (source.IsStackSlot()) {
5160 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005161 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005162 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005163 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005164 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005165 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005166 } else {
5167 DCHECK(destination.IsStackSlot());
5168 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5169 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5170 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005171 } else if (source.IsDoubleStackSlot()) {
5172 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005173 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005174 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005175 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005176 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5177 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005178 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005179 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005180 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5181 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5182 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005183 } else if (source.IsConstant()) {
5184 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005185 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5186 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005187 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005188 if (value == 0) {
5189 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
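        // xorl reg, reg is preferred for zero: it has a shorter encoding
        // and is a recognized dependency-breaking zeroing idiom.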
5190 } else {
5191 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5192 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005193 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005194 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005195 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005196 }
5197 } else if (constant->IsLongConstant()) {
5198 int64_t value = constant->AsLongConstant()->GetValue();
5199 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005200 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005201 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005202 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005203 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005204 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005205 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005206 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005207 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005208 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005209 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005210 } else {
5211 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005212 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005213 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5214 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005215 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005216 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005217 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005218 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005219 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005220 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005221 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005222 } else {
5223 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005224 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005225 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005226 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005227 } else if (source.IsFpuRegister()) {
5228 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005229 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005230 } else if (destination.IsStackSlot()) {
5231 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005232 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005233 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005234 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005235 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005236 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005237 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005238 }
5239}
5240
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005241void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005242 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005243 __ movl(Address(CpuRegister(RSP), mem), reg);
5244 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005245}
5246
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005247void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005248 ScratchRegisterScope ensure_scratch(
5249 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5250
5251 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5252 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5253 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5254 Address(CpuRegister(RSP), mem2 + stack_offset));
5255 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5256 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5257 CpuRegister(ensure_scratch.GetRegister()));
5258}
5259
Mark Mendell8a1c7282015-06-29 15:41:28 -04005260void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5261 __ movq(CpuRegister(TMP), reg1);
5262 __ movq(reg1, reg2);
5263 __ movq(reg2, CpuRegister(TMP));
5264}
5265
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005266void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5267 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5268 __ movq(Address(CpuRegister(RSP), mem), reg);
5269 __ movq(reg, CpuRegister(TMP));
5270}
5271
5272void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5273 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005274 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005275
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005276 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5277 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5278 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5279 Address(CpuRegister(RSP), mem2 + stack_offset));
5280 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5281 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5282 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005283}
5284
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005285void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5286 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5287 __ movss(Address(CpuRegister(RSP), mem), reg);
5288 __ movd(reg, CpuRegister(TMP));
5289}
5290
5291void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5292 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5293 __ movsd(Address(CpuRegister(RSP), mem), reg);
5294 __ movd(reg, CpuRegister(TMP));
5295}
5296
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005297void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005298 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005299 Location source = move->GetSource();
5300 Location destination = move->GetDestination();
5301
5302 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005303 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005304 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005305 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005306 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005307 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005308 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005309 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5310 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005311 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005312 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005313 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005314 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5315 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005316 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005317 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5318 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5319 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005320 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005321 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005322 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005323 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005324 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005325 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005326 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005327 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005328 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005329 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005330 }
5331}
5332
5333
5334void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5335 __ pushq(CpuRegister(reg));
5336}
5337
5338
5339void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5340 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005341}
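
// SpillScratch/RestoreScratch are the hooks ScratchRegisterScope uses in the
// memory-to-memory exchanges above: when no core register is free, the chosen
// scratch register is pushed here and popped afterwards. This is also why
// those exchanges re-bias their stack offsets by kX86_64WordSize whenever
// IsSpilled() reports that such a push has moved RSP.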
5342
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005343void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005344 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005345 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5346 Immediate(mirror::Class::kStatusInitialized));
5347 __ j(kLess, slow_path->GetEntryLabel());
5348 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005349 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005350}
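
// In other words, the initialization check compiles down to a single compare
// of the class status word against kStatusInitialized plus a conditional jump,
// roughly:
//
//   cmpl [class_reg + Class::StatusOffset()], kStatusInitialized
//   jl   slow_path      // not fully initialized: call into the runtime
// exit:
//
// This relies on the assumption that the status enum keeps every
// not-fully-initialized state numerically below kStatusInitialized, which is
// what the signed kLess comparison depends on.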
5351
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005352HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5353 HLoadClass::LoadKind desired_class_load_kind) {
5354 if (kEmitCompilerReadBarrier) {
5355 switch (desired_class_load_kind) {
5356 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5357 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5358 case HLoadClass::LoadKind::kBootImageAddress:
5359 // TODO: Implement for read barrier.
5360 return HLoadClass::LoadKind::kDexCacheViaMethod;
5361 default:
5362 break;
5363 }
5364 }
5365 switch (desired_class_load_kind) {
5366 case HLoadClass::LoadKind::kReferrersClass:
5367 break;
5368 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5369 DCHECK(!GetCompilerOptions().GetCompilePic());
5370 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5371 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5372 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5373 DCHECK(GetCompilerOptions().GetCompilePic());
5374 break;
5375 case HLoadClass::LoadKind::kBootImageAddress:
5376 break;
5377 case HLoadClass::LoadKind::kDexCacheAddress:
5378 DCHECK(Runtime::Current()->UseJitCompilation());
5379 break;
5380 case HLoadClass::LoadKind::kDexCachePcRelative:
5381 DCHECK(!Runtime::Current()->UseJitCompilation());
5382 break;
5383 case HLoadClass::LoadKind::kDexCacheViaMethod:
5384 break;
5385 }
5386 return desired_class_load_kind;
5387}
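
// Net effect of the selection above: with read barriers enabled the boot image
// kinds fall back to loading through the dex cache; without them, non-PIC
// link-time addresses are upgraded to the PC-relative kind (RIP-relative
// addressing is always available on x86-64), the address-based dex cache kind
// is reserved for the JIT, and the PC-relative dex cache kind for AOT
// compilation. All other requested kinds are kept as-is.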
5388
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005389void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005390 if (cls->NeedsAccessCheck()) {
5391 InvokeRuntimeCallingConvention calling_convention;
5392 CodeGenerator::CreateLoadClassLocationSummary(
5393 cls,
5394 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5395 Location::RegisterLocation(RAX),
5396 /* code_generator_supports_read_barrier */ true);
5397 return;
5398 }
5399
5400 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
5401 ? LocationSummary::kCallOnSlowPath
5402 : LocationSummary::kNoCall;
5403 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
5404 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5405 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5406 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5407 locations->SetInAt(0, Location::RequiresRegister());
5408 }
5409 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005410}
5411
5412void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005413 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005414 if (cls->NeedsAccessCheck()) {
5415 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
5416 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
5417 cls,
5418 cls->GetDexPc(),
5419 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005420 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005421 return;
5422 }
5423
Roland Levillain0d5a2812015-11-13 10:07:31 +00005424 Location out_loc = locations->Out();
5425 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005426
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005427 bool generate_null_check = false;
5428 switch (cls->GetLoadKind()) {
5429 case HLoadClass::LoadKind::kReferrersClass: {
5430 DCHECK(!cls->CanCallRuntime());
5431 DCHECK(!cls->MustGenerateClinitCheck());
5432 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5433 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5434 GenerateGcRootFieldLoad(
5435 cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5436 break;
5437 }
5438 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5439 DCHECK(!kEmitCompilerReadBarrier);
5440 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5441 codegen_->RecordTypePatch(cls);
5442 break;
5443 case HLoadClass::LoadKind::kBootImageAddress: {
5444 DCHECK(!kEmitCompilerReadBarrier);
5445 DCHECK_NE(cls->GetAddress(), 0u);
5446 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5447 __ movl(out, Immediate(address)); // Zero-extended.
5448 codegen_->RecordSimplePatch();
5449 break;
5450 }
5451 case HLoadClass::LoadKind::kDexCacheAddress: {
5452 DCHECK_NE(cls->GetAddress(), 0u);
5453 // /* GcRoot<mirror::Class> */ out = *address
5454 if (IsUint<32>(cls->GetAddress())) {
5455 Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
5456 GenerateGcRootFieldLoad(cls, out_loc, address);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005457 } else {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005458 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5459 __ movq(out, Immediate(cls->GetAddress()));
5460 GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00005461 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005462 generate_null_check = !cls->IsInDexCache();
5463 break;
5464 }
5465 case HLoadClass::LoadKind::kDexCachePcRelative: {
5466 uint32_t offset = cls->GetDexCacheElementOffset();
5467 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5468 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5469 /* no_rip */ false);
5470 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
5471 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label);
5472 generate_null_check = !cls->IsInDexCache();
5473 break;
5474 }
5475 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5476 // /* GcRoot<mirror::Class>[] */ out =
5477 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5478 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5479 __ movq(out,
5480 Address(current_method,
5481 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5482 // /* GcRoot<mirror::Class> */ out = out[type_index]
5483 GenerateGcRootFieldLoad(
5484 cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
5485 generate_null_check = !cls->IsInDexCache();
5486 break;
5487 }
5488 default:
5489 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5490 UNREACHABLE();
5491 }
5492
5493 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5494 DCHECK(cls->CanCallRuntime());
5495 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5496 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5497 codegen_->AddSlowPath(slow_path);
5498 if (generate_null_check) {
5499 __ testl(out, out);
5500 __ j(kEqual, slow_path->GetEntryLabel());
5501 }
5502 if (cls->MustGenerateClinitCheck()) {
5503 GenerateClassInitializationCheck(slow_path, out);
5504 } else {
5505 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005506 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005507 }
5508}
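
// For the load kinds that may miss (the dex cache loads) or that carry a
// clinit check, the tail above emits a shared slow-path pattern, roughly:
//
//   testl out, out
//   jz    load_class_slow_path    // resolve the class at runtime
//   ...                           // optional clinit check, see above
// exit:
//
// The same LoadClassSlowPathX86_64 instance covers both the null-check miss
// and MustGenerateClinitCheck().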
5509
5510void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5511 LocationSummary* locations =
5512 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5513 locations->SetInAt(0, Location::RequiresRegister());
5514 if (check->HasUses()) {
5515 locations->SetOut(Location::SameAsFirstInput());
5516 }
5517}
5518
5519void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005520  // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005521 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005522 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005523 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005524 GenerateClassInitializationCheck(slow_path,
5525 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005526}
5527
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005528HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5529 HLoadString::LoadKind desired_string_load_kind) {
5530 if (kEmitCompilerReadBarrier) {
5531 switch (desired_string_load_kind) {
5532 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5533 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5534 case HLoadString::LoadKind::kBootImageAddress:
5535 // TODO: Implement for read barrier.
5536 return HLoadString::LoadKind::kDexCacheViaMethod;
5537 default:
5538 break;
5539 }
5540 }
5541 switch (desired_string_load_kind) {
5542 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5543 DCHECK(!GetCompilerOptions().GetCompilePic());
5544 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5545 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5546 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5547 DCHECK(GetCompilerOptions().GetCompilePic());
5548 break;
5549 case HLoadString::LoadKind::kBootImageAddress:
5550 break;
5551 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005552 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005553 break;
5554 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005555 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005556 break;
5557 case HLoadString::LoadKind::kDexCacheViaMethod:
5558 break;
5559 }
5560 return desired_string_load_kind;
5561}
5562
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005563void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005564 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005565 ? LocationSummary::kCallOnSlowPath
5566 : LocationSummary::kNoCall;
5567 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005568 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5569 locations->SetInAt(0, Location::RequiresRegister());
5570 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005571 locations->SetOut(Location::RequiresRegister());
5572}
5573
5574void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005575 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005576 Location out_loc = locations->Out();
5577 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005578
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005579 switch (load->GetLoadKind()) {
5580 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
5581 DCHECK(!kEmitCompilerReadBarrier);
5582 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5583 codegen_->RecordStringPatch(load);
5584 return; // No dex cache slow path.
5585 }
5586 case HLoadString::LoadKind::kBootImageAddress: {
5587 DCHECK(!kEmitCompilerReadBarrier);
5588 DCHECK_NE(load->GetAddress(), 0u);
5589 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5590 __ movl(out, Immediate(address)); // Zero-extended.
5591 codegen_->RecordSimplePatch();
5592 return; // No dex cache slow path.
5593 }
5594 case HLoadString::LoadKind::kDexCacheAddress: {
5595 DCHECK_NE(load->GetAddress(), 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005596 // /* GcRoot<mirror::String> */ out = *address
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005597 if (IsUint<32>(load->GetAddress())) {
5598 Address address = Address::Absolute(load->GetAddress(), /* no_rip */ true);
5599 GenerateGcRootFieldLoad(load, out_loc, address);
5600 } else {
5601 // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
5602 __ movq(out, Immediate(load->GetAddress()));
5603 GenerateGcRootFieldLoad(load, out_loc, Address(out, 0));
5604 }
5605 break;
5606 }
5607 case HLoadString::LoadKind::kDexCachePcRelative: {
5608 uint32_t offset = load->GetDexCacheElementOffset();
5609 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(load->GetDexFile(), offset);
5610 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5611 /* no_rip */ false);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005612 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005613 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label);
5614 break;
5615 }
5616 case HLoadString::LoadKind::kDexCacheViaMethod: {
5617 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5618
5619 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5620 GenerateGcRootFieldLoad(
5621 load, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
5622 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
5623 __ movq(out, Address(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
5624 // /* GcRoot<mirror::String> */ out = out[string_index]
5625 GenerateGcRootFieldLoad(
5626 load, out_loc, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
5627 break;
5628 }
5629 default:
5630 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
5631 UNREACHABLE();
5632 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005633
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005634 if (!load->IsInDexCache()) {
5635 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5636 codegen_->AddSlowPath(slow_path);
5637 __ testl(out, out);
5638 __ j(kEqual, slow_path->GetEntryLabel());
5639 __ Bind(slow_path->GetExitLabel());
5640 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005641}
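
// Unlike VisitLoadClass above, a string load has no clinit-check equivalent:
// the boot image kinds return early ("No dex cache slow path"), and only the
// dex cache kinds fall through to the shared testl/jz sequence that enters
// LoadStringSlowPathX86_64 when the cache entry is still null.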
5642
David Brazdilcb1c0552015-08-04 16:22:25 +01005643static Address GetExceptionTlsAddress() {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005644 return Address::Absolute(Thread::ExceptionOffset<kX86_64WordSize>().Int32Value(),
5645 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005646}
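
// On x86-64 the current Thread* is reachable through the GS segment register,
// so the pending exception field can be read or cleared with a single
// segment-prefixed move, roughly:
//
//   gs movl out, [Thread::ExceptionOffset]   // load Thread::Current()->exception_
//   gs movl [Thread::ExceptionOffset], 0     // clear it
//
// which is what the Load/ClearException visitors below emit.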
5647
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005648void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5649 LocationSummary* locations =
5650 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5651 locations->SetOut(Location::RequiresRegister());
5652}
5653
5654void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005655 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5656}
5657
5658void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5659 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5660}
5661
5662void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5663 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005664}
5665
5666void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5667 LocationSummary* locations =
5668 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
5669 InvokeRuntimeCallingConvention calling_convention;
5670 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5671}
5672
5673void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01005674 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pDeliverException),
5675 instruction,
5676 instruction->GetDexPc(),
5677 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00005678 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005679}
5680
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005681static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5682 return kEmitCompilerReadBarrier &&
5683 (kUseBakerReadBarrier ||
5684 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5685 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5686 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5687}
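
// The extra temporary is only needed when a read barrier may run: with
// Baker's technique the reference-load helpers take a temp register, and with
// the slow-path flavour the original reference has to be preserved (see
// GenerateReferenceLoadOneRegister) before `out`/`temp` is overwritten, so
// that the read barrier slow path can still find the holder object.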
5688
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005689void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005690 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005691 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5692 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005693 case TypeCheckKind::kExactCheck:
5694 case TypeCheckKind::kAbstractClassCheck:
5695 case TypeCheckKind::kClassHierarchyCheck:
5696 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005697 call_kind =
5698 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005699 break;
5700 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005701 case TypeCheckKind::kUnresolvedCheck:
5702 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005703 call_kind = LocationSummary::kCallOnSlowPath;
5704 break;
5705 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005706
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005707 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005708 locations->SetInAt(0, Location::RequiresRegister());
5709 locations->SetInAt(1, Location::Any());
5710 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5711 locations->SetOut(Location::RequiresRegister());
5712 // When read barriers are enabled, we need a temporary register for
5713 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005714 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005715 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005716 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005717}
5718
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005719void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005720 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005721 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005722 Location obj_loc = locations->InAt(0);
5723 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005724 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005725 Location out_loc = locations->Out();
5726 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005727 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005728 locations->GetTemp(0) :
5729 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005730 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005731 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5732 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5733 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005734 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005735 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005736
5737 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005738 // Avoid null check if we know obj is not null.
5739 if (instruction->MustDoNullCheck()) {
5740 __ testl(obj, obj);
5741 __ j(kEqual, &zero);
5742 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005743
Roland Levillain0d5a2812015-11-13 10:07:31 +00005744 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005745 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005746
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005747 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005748 case TypeCheckKind::kExactCheck: {
5749 if (cls.IsRegister()) {
5750 __ cmpl(out, cls.AsRegister<CpuRegister>());
5751 } else {
5752 DCHECK(cls.IsStackSlot()) << cls;
5753 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5754 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005755 if (zero.IsLinked()) {
5756 // Classes must be equal for the instanceof to succeed.
5757 __ j(kNotEqual, &zero);
5758 __ movl(out, Immediate(1));
5759 __ jmp(&done);
5760 } else {
5761 __ setcc(kEqual, out);
5762 // setcc only sets the low byte.
5763 __ andl(out, Immediate(1));
5764 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005765 break;
5766 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005767
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005768 case TypeCheckKind::kAbstractClassCheck: {
5769 // If the class is abstract, we eagerly fetch the super class of the
5770 // object to avoid doing a comparison we know will fail.
5771 NearLabel loop, success;
5772 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005773 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005774 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005775 __ testl(out, out);
5776 // If `out` is null, we use it for the result, and jump to `done`.
5777 __ j(kEqual, &done);
5778 if (cls.IsRegister()) {
5779 __ cmpl(out, cls.AsRegister<CpuRegister>());
5780 } else {
5781 DCHECK(cls.IsStackSlot()) << cls;
5782 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5783 }
5784 __ j(kNotEqual, &loop);
5785 __ movl(out, Immediate(1));
5786 if (zero.IsLinked()) {
5787 __ jmp(&done);
5788 }
5789 break;
5790 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005791
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792 case TypeCheckKind::kClassHierarchyCheck: {
5793 // Walk over the class hierarchy to find a match.
5794 NearLabel loop, success;
5795 __ Bind(&loop);
5796 if (cls.IsRegister()) {
5797 __ cmpl(out, cls.AsRegister<CpuRegister>());
5798 } else {
5799 DCHECK(cls.IsStackSlot()) << cls;
5800 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5801 }
5802 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005803 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005804 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005805 __ testl(out, out);
5806 __ j(kNotEqual, &loop);
5807 // If `out` is null, we use it for the result, and jump to `done`.
5808 __ jmp(&done);
5809 __ Bind(&success);
5810 __ movl(out, Immediate(1));
5811 if (zero.IsLinked()) {
5812 __ jmp(&done);
5813 }
5814 break;
5815 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005816
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005817 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005818 // Do an exact check.
5819 NearLabel exact_check;
5820 if (cls.IsRegister()) {
5821 __ cmpl(out, cls.AsRegister<CpuRegister>());
5822 } else {
5823 DCHECK(cls.IsStackSlot()) << cls;
5824 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5825 }
5826 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005827 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005828 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005829 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005830 __ testl(out, out);
5831 // If `out` is null, we use it for the result, and jump to `done`.
5832 __ j(kEqual, &done);
5833 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5834 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005835 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005836 __ movl(out, Immediate(1));
5837 __ jmp(&done);
5838 break;
5839 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005840
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005841 case TypeCheckKind::kArrayCheck: {
5842 if (cls.IsRegister()) {
5843 __ cmpl(out, cls.AsRegister<CpuRegister>());
5844 } else {
5845 DCHECK(cls.IsStackSlot()) << cls;
5846 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5847 }
5848 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005849 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5850 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005851 codegen_->AddSlowPath(slow_path);
5852 __ j(kNotEqual, slow_path->GetEntryLabel());
5853 __ movl(out, Immediate(1));
5854 if (zero.IsLinked()) {
5855 __ jmp(&done);
5856 }
5857 break;
5858 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005859
Calin Juravle98893e12015-10-02 21:05:03 +01005860 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005861 case TypeCheckKind::kInterfaceCheck: {
5862 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005863 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005864 // cases.
5865 //
5866 // We cannot directly call the InstanceofNonTrivial runtime
5867 // entry point without resorting to a type checking slow path
5868 // here (i.e. by calling InvokeRuntime directly), as it would
5869      // require assigning fixed registers for the inputs of this
5870 // HInstanceOf instruction (following the runtime calling
5871 // convention), which might be cluttered by the potential first
5872 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005873 //
5874 // TODO: Introduce a new runtime entry point taking the object
5875 // to test (instead of its class) as argument, and let it deal
5876 // with the read barrier issues. This will let us refactor this
5877 // case of the `switch` code as it was previously (with a direct
5878 // call to the runtime not using a type checking slow path).
5879 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005880 DCHECK(locations->OnlyCallsOnSlowPath());
5881 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5882 /* is_fatal */ false);
5883 codegen_->AddSlowPath(slow_path);
5884 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005885 if (zero.IsLinked()) {
5886 __ jmp(&done);
5887 }
5888 break;
5889 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005890 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005891
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005892 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005893 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005894 __ xorl(out, out);
5895 }
5896
5897 if (done.IsLinked()) {
5898 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005899 }
5900
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005901 if (slow_path != nullptr) {
5902 __ Bind(slow_path->GetExitLabel());
5903 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005904}
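
// For the common kExactCheck with a known non-null object (and ignoring heap
// reference poisoning and read barriers), the code above boils down to a
// branchless sequence along the lines of:
//
//   movl out, [obj + Object::ClassOffset()]   // load obj->klass_
//   cmpl out, cls
//   sete out                                  // 1 if the classes match
//   andl out, 1                               // setcc only writes the low byte
//
// The other kinds add loops over super classes or component types, or defer
// entirely to TypeCheckSlowPathX86_64.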
5905
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005906void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005907 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5908 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005909 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5910 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005911 case TypeCheckKind::kExactCheck:
5912 case TypeCheckKind::kAbstractClassCheck:
5913 case TypeCheckKind::kClassHierarchyCheck:
5914 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005915 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5916 LocationSummary::kCallOnSlowPath :
5917 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005918 break;
5919 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005920 case TypeCheckKind::kUnresolvedCheck:
5921 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005922 call_kind = LocationSummary::kCallOnSlowPath;
5923 break;
5924 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005925 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5926 locations->SetInAt(0, Location::RequiresRegister());
5927 locations->SetInAt(1, Location::Any());
5928 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5929 locations->AddTemp(Location::RequiresRegister());
5930 // When read barriers are enabled, we need an additional temporary
5931 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005932 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005933 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005934 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005935}
5936
5937void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005938 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005939 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005940 Location obj_loc = locations->InAt(0);
5941 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005942 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005943 Location temp_loc = locations->GetTemp(0);
5944 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005945 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005946 locations->GetTemp(1) :
5947 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005948 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5949 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5950 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5951 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005952
Roland Levillain0d5a2812015-11-13 10:07:31 +00005953 bool is_type_check_slow_path_fatal =
5954 (type_check_kind == TypeCheckKind::kExactCheck ||
5955 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5956 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5957 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5958 !instruction->CanThrowIntoCatchBlock();
5959 SlowPathCode* type_check_slow_path =
5960 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5961 is_type_check_slow_path_fatal);
5962 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005963
Roland Levillain0d5a2812015-11-13 10:07:31 +00005964 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005965 case TypeCheckKind::kExactCheck:
5966 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005967 NearLabel done;
5968 // Avoid null check if we know obj is not null.
5969 if (instruction->MustDoNullCheck()) {
5970 __ testl(obj, obj);
5971 __ j(kEqual, &done);
5972 }
5973
5974 // /* HeapReference<Class> */ temp = obj->klass_
5975 GenerateReferenceLoadTwoRegisters(
5976 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
5977
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005978 if (cls.IsRegister()) {
5979 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5980 } else {
5981 DCHECK(cls.IsStackSlot()) << cls;
5982 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5983 }
5984 // Jump to slow path for throwing the exception or doing a
5985 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005986 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005987 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005988 break;
5989 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005990
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005991 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005992 NearLabel done;
5993 // Avoid null check if we know obj is not null.
5994 if (instruction->MustDoNullCheck()) {
5995 __ testl(obj, obj);
5996 __ j(kEqual, &done);
5997 }
5998
5999 // /* HeapReference<Class> */ temp = obj->klass_
6000 GenerateReferenceLoadTwoRegisters(
6001 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6002
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006003 // If the class is abstract, we eagerly fetch the super class of the
6004 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006005 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006006 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006007 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006008 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006009
6010 // If the class reference currently in `temp` is not null, jump
6011 // to the `compare_classes` label to compare it with the checked
6012 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006013 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006014 __ j(kNotEqual, &compare_classes);
6015 // Otherwise, jump to the slow path to throw the exception.
6016 //
6017 // But before, move back the object's class into `temp` before
6018 // going into the slow path, as it has been overwritten in the
6019 // meantime.
6020 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006021 GenerateReferenceLoadTwoRegisters(
6022 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006023 __ jmp(type_check_slow_path->GetEntryLabel());
6024
6025 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006026 if (cls.IsRegister()) {
6027 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6028 } else {
6029 DCHECK(cls.IsStackSlot()) << cls;
6030 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6031 }
6032 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006033 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006034 break;
6035 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006036
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006037 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006038 NearLabel done;
6039 // Avoid null check if we know obj is not null.
6040 if (instruction->MustDoNullCheck()) {
6041 __ testl(obj, obj);
6042 __ j(kEqual, &done);
6043 }
6044
6045 // /* HeapReference<Class> */ temp = obj->klass_
6046 GenerateReferenceLoadTwoRegisters(
6047 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6048
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006049 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006050 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006051 __ Bind(&loop);
6052 if (cls.IsRegister()) {
6053 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6054 } else {
6055 DCHECK(cls.IsStackSlot()) << cls;
6056 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6057 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006058 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006059
Roland Levillain0d5a2812015-11-13 10:07:31 +00006060 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006061 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006062
6063 // If the class reference currently in `temp` is not null, jump
6064 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006065 __ testl(temp, temp);
6066 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006067 // Otherwise, jump to the slow path to throw the exception.
6068 //
6069 // But before, move back the object's class into `temp` before
6070 // going into the slow path, as it has been overwritten in the
6071 // meantime.
6072 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006073 GenerateReferenceLoadTwoRegisters(
6074 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006075 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006076 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006077 break;
6078 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006079
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006080 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006081 // We cannot use a NearLabel here, as its range might be too
6082 // short in some cases when read barriers are enabled. This has
6083 // been observed for instance when the code emitted for this
6084 // case uses high x86-64 registers (R8-R15).
6085 Label done;
6086 // Avoid null check if we know obj is not null.
6087 if (instruction->MustDoNullCheck()) {
6088 __ testl(obj, obj);
6089 __ j(kEqual, &done);
6090 }
6091
6092 // /* HeapReference<Class> */ temp = obj->klass_
6093 GenerateReferenceLoadTwoRegisters(
6094 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6095
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006096 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006097 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006098 if (cls.IsRegister()) {
6099 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6100 } else {
6101 DCHECK(cls.IsStackSlot()) << cls;
6102 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6103 }
6104 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006105
6106 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006107 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006108 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006109
6110 // If the component type is not null (i.e. the object is indeed
6111 // an array), jump to label `check_non_primitive_component_type`
6112 // to further check that this component type is not a primitive
6113 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006114 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006115 __ j(kNotEqual, &check_non_primitive_component_type);
6116 // Otherwise, jump to the slow path to throw the exception.
6117 //
6118 // But before, move back the object's class into `temp` before
6119 // going into the slow path, as it has been overwritten in the
6120 // meantime.
6121 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006122 GenerateReferenceLoadTwoRegisters(
6123 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006124 __ jmp(type_check_slow_path->GetEntryLabel());
6125
6126 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006127 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006128 __ j(kEqual, &done);
6129 // Same comment as above regarding `temp` and the slow path.
6130 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006131 GenerateReferenceLoadTwoRegisters(
6132 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006133 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006134 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006135 break;
6136 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006137
Calin Juravle98893e12015-10-02 21:05:03 +01006138 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006139 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00006140 NearLabel done;
6141 // Avoid null check if we know obj is not null.
6142 if (instruction->MustDoNullCheck()) {
6143 __ testl(obj, obj);
6144 __ j(kEqual, &done);
6145 }
6146
6147 // /* HeapReference<Class> */ temp = obj->klass_
6148 GenerateReferenceLoadTwoRegisters(
6149 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
6150
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006151 // We always go into the type check slow path for the unresolved
6152 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006153 //
6154 // We cannot directly call the CheckCast runtime entry point
6155 // without resorting to a type checking slow path here (i.e. by
6156 // calling InvokeRuntime directly), as it would require to
6157 // assign fixed registers for the inputs of this HInstanceOf
6158 // instruction (following the runtime calling convention), which
6159 // might be cluttered by the potential first read barrier
6160 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006161 //
6162 // TODO: Introduce a new runtime entry point taking the object
6163 // to test (instead of its class) as argument, and let it deal
6164 // with the read barrier issues. This will let us refactor this
6165 // case of the `switch` code as it was previously (with a direct
6166 // call to the runtime not using a type checking slow path).
6167 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006168 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006169 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006170 break;
6171 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006172
Roland Levillain0d5a2812015-11-13 10:07:31 +00006173 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006174}
6175
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006176void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6177 LocationSummary* locations =
6178 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
6179 InvokeRuntimeCallingConvention calling_convention;
6180 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6181}
6182
6183void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Alexandre Rames8158f282015-08-07 10:26:17 +01006184 codegen_->InvokeRuntime(instruction->IsEnter() ? QUICK_ENTRY_POINT(pLockObject)
6185 : QUICK_ENTRY_POINT(pUnlockObject),
6186 instruction,
6187 instruction->GetDexPc(),
6188 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00006189 if (instruction->IsEnter()) {
6190 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6191 } else {
6192 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6193 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006194}
6195
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006196void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6197void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6198void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6199
6200void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6201 LocationSummary* locations =
6202 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6203 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6204 || instruction->GetResultType() == Primitive::kPrimLong);
6205 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006206 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006207 locations->SetOut(Location::SameAsFirstInput());
6208}
6209
6210void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6211 HandleBitwiseOperation(instruction);
6212}
6213
6214void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6215 HandleBitwiseOperation(instruction);
6216}
6217
6218void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6219 HandleBitwiseOperation(instruction);
6220}
6221
6222void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6223 LocationSummary* locations = instruction->GetLocations();
6224 Location first = locations->InAt(0);
6225 Location second = locations->InAt(1);
6226 DCHECK(first.Equals(locations->Out()));
6227
6228 if (instruction->GetResultType() == Primitive::kPrimInt) {
6229 if (second.IsRegister()) {
6230 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006231 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006232 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006233 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006234 } else {
6235 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006236 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006237 }
6238 } else if (second.IsConstant()) {
6239 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6240 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006241 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006242 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006243 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006244 } else {
6245 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006246 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006247 }
6248 } else {
6249 Address address(CpuRegister(RSP), second.GetStackIndex());
6250 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006251 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006252 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006253 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006254 } else {
6255 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006256 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006257 }
6258 }
6259 } else {
6260 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006261 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6262 bool second_is_constant = false;
6263 int64_t value = 0;
6264 if (second.IsConstant()) {
6265 second_is_constant = true;
6266 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006267 }
Mark Mendell40741f32015-04-20 22:10:34 -04006268 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006269
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006270 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006271 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006272 if (is_int32_value) {
6273 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6274 } else {
6275 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6276 }
6277 } else if (second.IsDoubleStackSlot()) {
6278 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006279 } else {
6280 __ andq(first_reg, second.AsRegister<CpuRegister>());
6281 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006282 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006283 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006284 if (is_int32_value) {
6285 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6286 } else {
6287 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6288 }
6289 } else if (second.IsDoubleStackSlot()) {
6290 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006291 } else {
6292 __ orq(first_reg, second.AsRegister<CpuRegister>());
6293 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006294 } else {
6295 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006296 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006297 if (is_int32_value) {
6298 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6299 } else {
6300 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6301 }
6302 } else if (second.IsDoubleStackSlot()) {
6303 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006304 } else {
6305 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6306 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006307 }
6308 }
6309}
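
// The 64-bit paths above work around an ISA restriction: andq/orq/xorq only
// accept 32-bit sign-extended immediates. A constant such as 0x100000000
// therefore cannot be encoded directly and is instead placed in the constant
// area and referenced RIP-relatively via LiteralInt64Address, e.g.:
//
//   andq first_reg, [rip + <64-bit literal>]
//
// while any value that fits in an int32 keeps the shorter immediate form.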

void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                      Location out,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, maybe_temp, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                       Location out,
                                                                       Location obj,
                                                                       uint32_t offset,
                                                                       Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, maybe_temp, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             const Address& address,
                                                             Label* fixup_label) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");
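      // Together, these asserts guarantee that a GC root is exactly 32 bits wide, so the
      // single movl above reads the whole root.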

      // Slow path used to mark the GC root `root`.
      SlowPathCode* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, root);
      codegen_->AddSlowPath(slow_path);

      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64WordSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Address src(obj, offset);
  GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
}

void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
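  // The assert also justifies the TIMES_4 scale factor used in the addressing modes below.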
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  Address src = index.IsConstant() ?
      Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset) :
      Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset);
  GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, temp, needs_null_check);
}

void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    Location temp,
                                                                    bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  CpuRegister temp_reg = temp.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ movl(temp_reg, Address(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");
  // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
  __ shrl(temp_reg, Immediate(LockWord::kReadBarrierStateShift));
  __ andl(temp_reg, Immediate(LockWord::kReadBarrierStateMask));
  static_assert(
      LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
      "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  __ cmpl(temp_reg, Immediate(ReadBarrier::gray_ptr_));
  __ j(kEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
  }
}

void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
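    // For illustration only: with lower_bound == 0 and four entries, the cascade below is
    // roughly
    //   cmpl value, 1; jb L0; je L1
    //   cmpl value, 3; jb L2; je L3
    //   jmp default
    // i.e. each compare dispatches two consecutive cases.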
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There is an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));
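  // Note: kAbove is an unsigned comparison, so a biased value that went negative (i.e. an
  // input below lower_bound) also ends up in the default block.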

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}

void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
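    // xorl both zeroes the register and is shorter than a movl with a zero immediate.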
    __ xorl(dest, dest);
  } else {
    __ movl(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
  if (value == 0) {
    // Clears upper bits too.
    __ xorl(dest, dest);
  } else if (IsUint<32>(value)) {
    // We can use a 32 bit move, as it will zero-extend and is shorter.
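    // For example, 0x00000000FFFFFFFF is emitted as a single movl of 0xFFFFFFFF; the implicit
    // zero-extension of 32-bit moves supplies the upper half.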
    __ movl(dest, Immediate(static_cast<int32_t>(value)));
  } else {
    __ movq(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
  if (value == 0) {
    __ xorps(dest, dest);
  } else {
    __ movss(dest, LiteralInt32Address(value));
  }
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
  if (value == 0) {
    __ xorpd(dest, dest);
  } else {
    __ movsd(dest, LiteralInt64Address(value));
  }
}

void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
  Load32BitValue(dest, bit_cast<int32_t, float>(value));
}

void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
  Load64BitValue(dest, bit_cast<int64_t, double>(value));
}

void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
  if (value == 0) {
    __ testl(dest, dest);
  } else {
    __ cmpl(dest, Immediate(value));
  }
}

void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
  if (IsInt<32>(value)) {
    if (value == 0) {
      __ testq(dest, dest);
    } else {
      __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
    }
  } else {
    // The value won't fit in a sign-extended 32-bit immediate; compare against the
    // constant area instead.
    __ cmpq(dest, LiteralInt64Address(value));
  }
}

void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
  DCHECK(dest.IsDoubleStackSlot());
  if (IsInt<32>(value)) {
    // Can move directly as an int32 constant.
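    // (movq sign-extends its 32-bit immediate, hence the IsInt<32> check above.)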
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
            Immediate(static_cast<int32_t>(value)));
  } else {
    Load64BitValue(CpuRegister(TMP), value);
    __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
  }
}

/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
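    // This matches x86-64 RIP-relative addressing: the stored displacement is relative to the
    // end of the current instruction (here the displacement is the instruction's last field).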
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};

/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
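    // Each entry is therefore a signed 32-bit offset from the table base, which
    // VisitPackedSwitch reloads with movsxd and adds back to the table address before jumping.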
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};

void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();
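    // This is the code offset that RIPFixup::Process() adds each entry's offset to (via
    // ConstantAreaStart()) when patching RIP-relative displacements.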

    // Populate any jump tables.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
  return Address::RIP(fixup);
}

Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
  return Address::RIP(fixup);
}

// TODO: trg as memory.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetArena());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}

Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
  // Create a fixup to be used to create and address the jump table.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);

  // We have to populate the jump tables.
  fixups_to_jump_tables_.push_back(table_fixup);
  return Address::RIP(table_fixup);
}

void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // The immediate doesn't fit in 32 bits. Do it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
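    // Only the first store needs to be recorded for the implicit null check: if it does not
    // fault, the base object is known to be non-null for the second store.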
    __ movl(addr_high, Immediate(high_v));
  }
}

#undef __

}  // namespace x86_64
}  // namespace art