blob: 163858e2072d1e824fae6c0a4a465dce61967a75 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Offset within the managed frame at which the current method is spilled.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the method argument on entry (first integer argument register).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved core and floating-point registers used by this code generator.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition bit (bit 10) of the x87 FPU status word.
static constexpr int kC2ConditionMask = 0x400;

// Shorthand for emitting instructions through the x86-64 assembler of the
// current code generator (`codegen` must be in scope at each use site).
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
// Slow path for integer Div/Rem taken when the divisor is -1 (see name;
// presumably to sidestep the idiv overflow case for the minimum dividend —
// TODO confirm against the call sites). It computes the result directly:
// div by -1 is the negated dividend, rem by -1 is zero.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `reg` holds the dividend on entry and receives the result.
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xor also zero-extends into the upper half of the 64-bit
        // register, so it suffices (and is shorter) for the long case.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  // Register holding the dividend / result.
  const CpuRegister cpu_reg_;
  // Operand type: kPrimInt or kPrimLong only.
  const Primitive::Type type_;
  // True for division, false for remainder.
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
136
// Slow path for HSuspendCheck: calls the kQuickTestSuspend entrypoint, then
// resumes either right after the check (via `return_label_`) or at an explicit
// successor basic block.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // `successor` may be null; in that case execution resumes at GetReturnLabel().
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      // Jump back to the point right after the suspend check.
      __ jmp(GetReturnLabel());
    } else {
      // Resume at the requested successor block.
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when no successor block was supplied.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  // Block to resume at after the runtime call, or null to resume in place.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
171
// Slow path for HBoundsCheck: passes (index, length) to the appropriate
// throwing entrypoint (string vs. array bounds). Handles the case where the
// array length was never materialized in a register (emitted at use site) by
// reloading it from memory here. IsFatal() is true — the call throws.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression) {
        // Strip the compression flag bit: keep only the length payload.
        __ andl(length_loc.AsRegister<CpuRegister>(), Immediate(INT32_MAX));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    // String.charAt bounds failures throw StringIndexOutOfBounds instead of
    // ArrayIndexOutOfBounds.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
230
// Slow path resolving a class through the runtime, optionally running its
// static initializer (kQuickInitializeStaticStorage vs. kQuickInitializeType).
// Used for both HLoadClass and HClinitCheck.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // The runtime takes the type index as its single argument.
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location. HClinitCheck has no output, in
    // which case `out` is invalid and nothing is moved.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      // The runtime call leaves its result in RAX.
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
289
// Slow path resolving a String through the runtime and caching the result in
// the .bss entry so subsequent fast-path loads find it there.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    // kDummy32BitOffset is a placeholder displacement; the label emitted right
    // after the store is recorded so the linker can patch the real address.
    __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
            locations->Out().AsRegister<CpuRegister>());
    Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
327
// Slow path shared by HInstanceOf and HCheckCast. Passes (object class,
// checked class) to the runtime: kQuickInstanceofNonTrivial returns a result
// (moved to the output location), kQuickCheckCast throws on failure. When
// `is_fatal_` is true the path never returns, so register save/restore and
// the exit jump are skipped.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // Check-cast keeps the object's class in a temp; instance-of keeps it in
    // the output location.
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The instance-of result comes back in RAX.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // True when the runtime call cannot return to the fast path.
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
388
Andreas Gampe85b62f22015-09-09 13:15:38 -0700389class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700390 public:
Aart Bik42249c32016-01-07 15:33:50 -0800391 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000392 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700393
394 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000395 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700396 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100397 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000398 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 }
400
Alexandre Rames9931f312015-06-19 14:47:01 +0100401 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
402
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700403 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700404 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
405};
406
// Slow path for HArraySet on object arrays: hands (array, index, value) to the
// kQuickAputObject entrypoint, which performs the store (and, presumably, the
// component-type check — confirm against the entrypoint implementation).
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Shuffle the three inputs into the first three argument registers; a
    // parallel move resolves any overlap between sources and destinations.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
447
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    // Marking slow paths are only emitted when compiler read barriers are on.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only a closed set of instructions is expected to embed this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
531
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100532// Slow path marking an object reference `ref` during a read barrier,
533// and if needed, atomically updating the field `obj.field` in the
534// object `obj` holding this reference after marking (contrary to
535// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
536// `obj.field`).
537//
538// This means that after the execution of this slow path, both `ref`
539// and `obj.field` will be up-to-date; i.e., after the flip, both will
540// hold the same to-space reference (unless another thread installed
541// another object reference (different from `ref`) in `obj.field`).
542class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
543 public:
544 ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
545 Location ref,
546 CpuRegister obj,
547 const Address& field_addr,
548 bool unpoison_ref_before_marking,
549 CpuRegister temp1,
550 CpuRegister temp2)
551 : SlowPathCode(instruction),
552 ref_(ref),
553 obj_(obj),
554 field_addr_(field_addr),
555 unpoison_ref_before_marking_(unpoison_ref_before_marking),
556 temp1_(temp1),
557 temp2_(temp2) {
558 DCHECK(kEmitCompilerReadBarrier);
559 }
560
561 const char* GetDescription() const OVERRIDE {
562 return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
563 }
564
565 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
566 LocationSummary* locations = instruction_->GetLocations();
567 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
568 Register ref_reg = ref_cpu_reg.AsRegister();
569 DCHECK(locations->CanCall());
570 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
571 // This slow path is only used by the UnsafeCASObject intrinsic.
572 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
573 << "Unexpected instruction in read barrier marking and field updating slow path: "
574 << instruction_->DebugName();
575 DCHECK(instruction_->GetLocations()->Intrinsified());
576 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
577
578 __ Bind(GetEntryLabel());
579 if (unpoison_ref_before_marking_) {
580 // Object* ref = ref_addr->AsMirrorPtr()
581 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
582 }
583
584 // Save the old (unpoisoned) reference.
585 __ movl(temp1_, ref_cpu_reg);
586
587 // No need to save live registers; it's taken care of by the
588 // entrypoint. Also, there is no need to update the stack mask,
589 // as this runtime call will not trigger a garbage collection.
590 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
591 DCHECK_NE(ref_reg, RSP);
592 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
593 // "Compact" slow path, saving two moves.
594 //
595 // Instead of using the standard runtime calling convention (input
596 // and output in R0):
597 //
598 // RDI <- ref
599 // RAX <- ReadBarrierMark(RDI)
600 // ref <- RAX
601 //
602 // we just use rX (the register containing `ref`) as input and output
603 // of a dedicated entrypoint:
604 //
605 // rX <- ReadBarrierMarkRegX(rX)
606 //
607 int32_t entry_point_offset =
608 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
609 // This runtime call does not require a stack map.
610 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
611
612 // If the new reference is different from the old reference,
613 // update the field in the holder (`*field_addr`).
614 //
615 // Note that this field could also hold a different object, if
616 // another thread had concurrently changed it. In that case, the
617 // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
618 // operation below would abort the CAS, leaving the field as-is.
619 NearLabel done;
620 __ cmpl(temp1_, ref_cpu_reg);
621 __ j(kEqual, &done);
622
623 // Update the the holder's field atomically. This may fail if
624 // mutator updates before us, but it's OK. This is achived
625 // using a strong compare-and-set (CAS) operation with relaxed
626 // memory synchronization ordering, where the expected value is
627 // the old reference and the desired value is the new reference.
628 // This operation is implemented with a 32-bit LOCK CMPXLCHG
629 // instruction, which requires the expected value (the old
630 // reference) to be in EAX. Save RAX beforehand, and move the
631 // expected value (stored in `temp1_`) into EAX.
632 __ movq(temp2_, CpuRegister(RAX));
633 __ movl(CpuRegister(RAX), temp1_);
634
635 // Convenience aliases.
636 CpuRegister base = obj_;
637 CpuRegister expected = CpuRegister(RAX);
638 CpuRegister value = ref_cpu_reg;
639
640 bool base_equals_value = (base.AsRegister() == value.AsRegister());
641 Register value_reg = ref_reg;
642 if (kPoisonHeapReferences) {
643 if (base_equals_value) {
644 // If `base` and `value` are the same register location, move
645 // `value_reg` to a temporary register. This way, poisoning
646 // `value_reg` won't invalidate `base`.
647 value_reg = temp1_.AsRegister();
648 __ movl(CpuRegister(value_reg), base);
649 }
650
651 // Check that the register allocator did not assign the location
652 // of `expected` (RAX) to `value` nor to `base`, so that heap
653 // poisoning (when enabled) works as intended below.
654 // - If `value` were equal to `expected`, both references would
655 // be poisoned twice, meaning they would not be poisoned at
656 // all, as heap poisoning uses address negation.
657 // - If `base` were equal to `expected`, poisoning `expected`
658 // would invalidate `base`.
659 DCHECK_NE(value_reg, expected.AsRegister());
660 DCHECK_NE(base.AsRegister(), expected.AsRegister());
661
662 __ PoisonHeapReference(expected);
663 __ PoisonHeapReference(CpuRegister(value_reg));
664 }
665
666 __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));
667
668 // If heap poisoning is enabled, we need to unpoison the values
669 // that were poisoned earlier.
670 if (kPoisonHeapReferences) {
671 if (base_equals_value) {
672 // `value_reg` has been moved to a temporary register, no need
673 // to unpoison it.
674 } else {
675 __ UnpoisonHeapReference(CpuRegister(value_reg));
676 }
677 // No need to unpoison `expected` (RAX), as it is be overwritten below.
678 }
679
680 // Restore RAX.
681 __ movq(CpuRegister(RAX), temp2_);
682
683 __ Bind(&done);
684 __ jmp(GetExitLabel());
685 }
686
687 private:
688 // The location (register) of the marked object reference.
689 const Location ref_;
690 // The register containing the object holding the marked object reference field.
691 const CpuRegister obj_;
692 // The address of the marked reference field. The base of this address must be `obj_`.
693 const Address field_addr_;
694
695 // Should the reference in `ref_` be unpoisoned prior to marking it?
696 const bool unpoison_ref_before_marking_;
697
698 const CpuRegister temp1_;
699 const CpuRegister temp2_;
700
701 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
702};
703
Roland Levillain0d5a2812015-11-13 10:07:31 +0000704// Slow path generating a read barrier for a heap reference.
705class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
706 public:
707 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
708 Location out,
709 Location ref,
710 Location obj,
711 uint32_t offset,
712 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000713 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000714 out_(out),
715 ref_(ref),
716 obj_(obj),
717 offset_(offset),
718 index_(index) {
719 DCHECK(kEmitCompilerReadBarrier);
720 // If `obj` is equal to `out` or `ref`, it means the initial
721 // object has been overwritten by (or after) the heap object
722 // reference load to be instrumented, e.g.:
723 //
724 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000725 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000726 //
727 // In that case, we have lost the information about the original
728 // object, and the emitted read barrier cannot work properly.
729 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
730 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
731}
732
733 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
734 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
735 LocationSummary* locations = instruction_->GetLocations();
736 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
737 DCHECK(locations->CanCall());
738 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100739 DCHECK(instruction_->IsInstanceFieldGet() ||
740 instruction_->IsStaticFieldGet() ||
741 instruction_->IsArrayGet() ||
742 instruction_->IsInstanceOf() ||
743 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100744 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000745 << "Unexpected instruction in read barrier for heap reference slow path: "
746 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000747
748 __ Bind(GetEntryLabel());
749 SaveLiveRegisters(codegen, locations);
750
751 // We may have to change the index's value, but as `index_` is a
752 // constant member (like other "inputs" of this slow path),
753 // introduce a copy of it, `index`.
754 Location index = index_;
755 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100756 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000757 if (instruction_->IsArrayGet()) {
758 // Compute real offset and store it in index_.
759 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
760 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
761 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
762 // We are about to change the value of `index_reg` (see the
763 // calls to art::x86_64::X86_64Assembler::shll and
764 // art::x86_64::X86_64Assembler::AddImmediate below), but it
765 // has not been saved by the previous call to
766 // art::SlowPathCode::SaveLiveRegisters, as it is a
767 // callee-save register --
768 // art::SlowPathCode::SaveLiveRegisters does not consider
769 // callee-save registers, as it has been designed with the
770 // assumption that callee-save registers are supposed to be
771 // handled by the called function. So, as a callee-save
772 // register, `index_reg` _would_ eventually be saved onto
773 // the stack, but it would be too late: we would have
774 // changed its value earlier. Therefore, we manually save
775 // it here into another freely available register,
776 // `free_reg`, chosen of course among the caller-save
777 // registers (as a callee-save `free_reg` register would
778 // exhibit the same problem).
779 //
780 // Note we could have requested a temporary register from
781 // the register allocator instead; but we prefer not to, as
782 // this is a slow path, and we know we can find a
783 // caller-save register that is available.
784 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
785 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
786 index_reg = free_reg;
787 index = Location::RegisterLocation(index_reg);
788 } else {
789 // The initial register stored in `index_` has already been
790 // saved in the call to art::SlowPathCode::SaveLiveRegisters
791 // (as it is not a callee-save register), so we can freely
792 // use it.
793 }
794 // Shifting the index value contained in `index_reg` by the
795 // scale factor (2) cannot overflow in practice, as the
796 // runtime is unable to allocate object arrays with a size
797 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
798 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
799 static_assert(
800 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
801 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
802 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
803 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100804 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
805 // intrinsics, `index_` is not shifted by a scale factor of 2
806 // (as in the case of ArrayGet), as it is actually an offset
807 // to an object field within an object.
808 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000809 DCHECK(instruction_->GetLocations()->Intrinsified());
810 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
811 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
812 << instruction_->AsInvoke()->GetIntrinsic();
813 DCHECK_EQ(offset_, 0U);
814 DCHECK(index_.IsRegister());
815 }
816 }
817
818 // We're moving two or three locations to locations that could
819 // overlap, so we need a parallel move resolver.
820 InvokeRuntimeCallingConvention calling_convention;
821 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
822 parallel_move.AddMove(ref_,
823 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
824 Primitive::kPrimNot,
825 nullptr);
826 parallel_move.AddMove(obj_,
827 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
828 Primitive::kPrimNot,
829 nullptr);
830 if (index.IsValid()) {
831 parallel_move.AddMove(index,
832 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
833 Primitive::kPrimInt,
834 nullptr);
835 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
836 } else {
837 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
838 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
839 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100840 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000841 instruction_,
842 instruction_->GetDexPc(),
843 this);
844 CheckEntrypointTypes<
845 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
846 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
847
848 RestoreLiveRegisters(codegen, locations);
849 __ jmp(GetExitLabel());
850 }
851
852 const char* GetDescription() const OVERRIDE {
853 return "ReadBarrierForHeapReferenceSlowPathX86_64";
854 }
855
856 private:
857 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
858 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
859 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
860 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
861 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
862 return static_cast<CpuRegister>(i);
863 }
864 }
865 // We shall never fail to find a free caller-save register, as
866 // there are more than two core caller-save registers on x86-64
867 // (meaning it is possible to find one which is different from
868 // `ref` and `obj`).
869 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
870 LOG(FATAL) << "Could not find a free caller-save register";
871 UNREACHABLE();
872 }
873
Roland Levillain0d5a2812015-11-13 10:07:31 +0000874 const Location out_;
875 const Location ref_;
876 const Location obj_;
877 const uint32_t offset_;
878 // An additional location containing an index to an array.
879 // Only used for HArrayGet and the UnsafeGetObject &
880 // UnsafeGetObjectVolatile intrinsics.
881 const Location index_;
882
883 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
884};
885
886// Slow path generating a read barrier for a GC root.
887class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
888 public:
889 ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +0000890 : SlowPathCode(instruction), out_(out), root_(root) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000891 DCHECK(kEmitCompilerReadBarrier);
892 }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000893
894 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
895 LocationSummary* locations = instruction_->GetLocations();
896 DCHECK(locations->CanCall());
897 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000898 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
899 << "Unexpected instruction in read barrier for GC root slow path: "
900 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000901
902 __ Bind(GetEntryLabel());
903 SaveLiveRegisters(codegen, locations);
904
905 InvokeRuntimeCallingConvention calling_convention;
906 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
907 x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100908 x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000909 instruction_,
910 instruction_->GetDexPc(),
911 this);
912 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
913 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
914
915 RestoreLiveRegisters(codegen, locations);
916 __ jmp(GetExitLabel());
917 }
918
919 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }
920
921 private:
Roland Levillain0d5a2812015-11-13 10:07:31 +0000922 const Location out_;
923 const Location root_;
924
925 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
926};
927
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100928#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100929// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
930#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100931
Roland Levillain4fa13f62015-07-06 18:11:54 +0100932inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700933 switch (cond) {
934 case kCondEQ: return kEqual;
935 case kCondNE: return kNotEqual;
936 case kCondLT: return kLess;
937 case kCondLE: return kLessEqual;
938 case kCondGT: return kGreater;
939 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700940 case kCondB: return kBelow;
941 case kCondBE: return kBelowEqual;
942 case kCondA: return kAbove;
943 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700944 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100945 LOG(FATAL) << "Unreachable";
946 UNREACHABLE();
947}
948
Aart Bike9f37602015-10-09 11:15:55 -0700949// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100950inline Condition X86_64FPCondition(IfCondition cond) {
951 switch (cond) {
952 case kCondEQ: return kEqual;
953 case kCondNE: return kNotEqual;
954 case kCondLT: return kBelow;
955 case kCondLE: return kBelowEqual;
956 case kCondGT: return kAbove;
957 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700958 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100959 };
960 LOG(FATAL) << "Unreachable";
961 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700962}
963
Vladimir Markodc151b22015-10-15 18:02:30 +0100964HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
965 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100966 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Vladimir Markodc151b22015-10-15 18:02:30 +0100967 switch (desired_dispatch_info.code_ptr_location) {
968 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
969 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
970 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
971 return HInvokeStaticOrDirect::DispatchInfo {
972 desired_dispatch_info.method_load_kind,
973 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
974 desired_dispatch_info.method_load_data,
975 0u
976 };
977 default:
978 return desired_dispatch_info;
979 }
980}
981
Serguei Katkov288c7a82016-05-16 11:53:15 +0600982Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
983 Location temp) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800984 // All registers are assumed to be correctly set up.
Vladimir Marko58155012015-08-19 12:49:41 +0000985 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
986 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100987 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +0000988 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100989 uint32_t offset =
990 GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
991 __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
Vladimir Marko58155012015-08-19 12:49:41 +0000992 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +0100993 }
Vladimir Marko58155012015-08-19 12:49:41 +0000994 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +0000995 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +0000996 break;
997 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
998 __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
999 break;
1000 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
1001 __ movl(temp.AsRegister<CpuRegister>(), Immediate(0)); // Placeholder.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001002 method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
1003 invoke->GetTargetMethod().dex_method_index);
Vladimir Marko58155012015-08-19 12:49:41 +00001004 __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn.
1005 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001006 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
Vladimir Marko58155012015-08-19 12:49:41 +00001007 __ movq(temp.AsRegister<CpuRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00001008 Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001009 // Bind a new fixup label at the end of the "movl" insn.
1010 uint32_t offset = invoke->GetDexCacheArrayOffset();
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01001011 __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
Vladimir Marko58155012015-08-19 12:49:41 +00001012 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001013 }
Vladimir Marko58155012015-08-19 12:49:41 +00001014 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00001015 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00001016 Register method_reg;
1017 CpuRegister reg = temp.AsRegister<CpuRegister>();
1018 if (current_method.IsRegister()) {
1019 method_reg = current_method.AsRegister<Register>();
1020 } else {
1021 DCHECK(invoke->GetLocations()->Intrinsified());
1022 DCHECK(!current_method.IsValid());
1023 method_reg = reg.AsRegister();
1024 __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
1025 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001026 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01001027 __ movq(reg,
1028 Address(CpuRegister(method_reg),
1029 ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
Vladimir Marko40ecb122016-04-06 17:33:41 +01001030 // temp = temp[index_in_cache];
1031 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
1032 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00001033 __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
1034 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01001035 }
Vladimir Marko58155012015-08-19 12:49:41 +00001036 }
Serguei Katkov288c7a82016-05-16 11:53:15 +06001037 return callee_method;
1038}
1039
// Emits a static or direct call: first materializes the callee method
// (see GenerateCalleeMethodStaticOrDirectCall), then emits the call
// according to the invoke's code pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: branch to this method's own frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      // PC-relative call to another method; the displacement is fixed
      // up at link time through `relative_call_patches_`.
      relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                          invoke->GetTargetMethod().dex_method_index);
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
1072
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001073void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
1074 CpuRegister temp = temp_in.AsRegister<CpuRegister>();
1075 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
1076 invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001077
1078 // Use the calling convention instead of the location of the receiver, as
1079 // intrinsics may have put the receiver in a different register. In the intrinsics
1080 // slow path, the arguments have been moved to the right place, so here we are
1081 // guaranteed that the receiver is the first register of the calling convention.
1082 InvokeDexCallingConvention calling_convention;
1083 Register receiver = calling_convention.GetRegisterAt(0);
1084
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001085 size_t class_offset = mirror::Object::ClassOffset().SizeValue();
Roland Levillain0d5a2812015-11-13 10:07:31 +00001086 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001087 __ movl(temp, Address(CpuRegister(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001088 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00001089 // Instead of simply (possibly) unpoisoning `temp` here, we should
1090 // emit a read barrier for the previous class reference load.
1091 // However this is not required in practice, as this is an
1092 // intermediate/temporary reference and because the current
1093 // concurrent copying collector keeps the from-space memory
1094 // intact/accessible until the end of the marking phase (the
1095 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001096 __ MaybeUnpoisonHeapReference(temp);
1097 // temp = temp->GetMethodAt(method_offset);
1098 __ movq(temp, Address(temp, method_offset));
1099 // call temp->GetEntryPoint();
1100 __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07001101 kX86_64PointerSize).SizeValue()));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00001102}
1103
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001104void CodeGeneratorX86_64::RecordSimplePatch() {
1105 if (GetCompilerOptions().GetIncludePatchInformation()) {
1106 simple_patches_.emplace_back();
1107 __ Bind(&simple_patches_.back());
1108 }
1109}
1110
Vladimir Markoaad75c62016-10-03 08:46:48 +00001111void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
1112 DCHECK(GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001113 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
1114 __ Bind(&string_patches_.back().label);
1115}
1116
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001117void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
1118 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
1119 __ Bind(&type_patches_.back().label);
1120}
1121
Vladimir Markoaad75c62016-10-03 08:46:48 +00001122Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1123 DCHECK(!GetCompilerOptions().IsBootImage());
1124 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
1125 return &string_patches_.back().label;
1126}
1127
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001128Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1129 uint32_t element_offset) {
1130 // Add a patch entry and return the label.
1131 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1132 return &pc_relative_dex_cache_patches_.back().label;
1133}
1134
Vladimir Markoaad75c62016-10-03 08:46:48 +00001135// The label points to the end of the "movl" or another instruction but the literal offset
1136// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1137constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1138
1139template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1140inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1141 const ArenaDeque<PatchInfo<Label>>& infos,
1142 ArenaVector<LinkerPatch>* linker_patches) {
1143 for (const PatchInfo<Label>& info : infos) {
1144 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1145 linker_patches->push_back(
1146 Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
1147 }
1148}
1149
// Translates all patch records accumulated during code generation into
// LinkerPatch entries for the linker to resolve against the final image.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve the exact total up front to avoid reallocation while appending.
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  for (const PatchInfo<Label>& info : method_patches_) {
    // The label is bound after the instruction; step back to the 32-bit literal.
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
  }
  for (const PatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  // Simple patches only record a position for the linker, with no target data.
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  if (!GetCompilerOptions().IsBootImage()) {
    // App compilation: strings are loaded through .bss entries.
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  } else {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  }
  // These are always PC-relative, see GetSupportedLoadClassKind().
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
}
1184
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001185void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001186 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001187}
1188
1189void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001190 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001191}
1192
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001193size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1194 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1195 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001196}
1197
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001198size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1199 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1200 return kX86_64WordSize;
1201}
1202
1203size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1204 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1205 return kX86_64WordSize;
1206}
1207
1208size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1209 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1210 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001211}
1212
Calin Juravle175dc732015-08-25 15:42:32 +01001213void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1214 HInstruction* instruction,
1215 uint32_t dex_pc,
1216 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001217 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001218 GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
1219 if (EntrypointRequiresStackMap(entrypoint)) {
1220 RecordPcInfo(instruction, dex_pc, slow_path);
1221 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001222}
1223
// Emits a runtime call for which no stack map is recorded; the caller must
// guarantee (checked by the validation below) that none is needed.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1230
1231void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001232 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
1233}
1234
// x86-64 holds 64-bit values in single registers, so no register pairs exist.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator: configures the base CodeGenerator with
// the register counts and callee-save masks (including the fake return-address
// register), and arena-allocates all patch bookkeeping containers.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
      : CodeGenerator(graph,
                      kNumberOfCpuRegisters,
                      kNumberOfFloatRegisters,
                      kNumberOfCpuRegisterPairs,
                      ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                          arraysize(kCoreCalleeSaves))
                          | (1 << kFakeReturnRegister),
                      ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                          arraysize(kFpuCalleeSaves)),
                      compiler_options,
                      stats),
        block_labels_(nullptr),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this),
        assembler_(graph->GetArena()),
        isa_features_(isa_features),
        constant_area_start_(0),
        method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // The fake return-address register must be marked as allocated so the frame
  // layout accounts for the return address pushed by the call instruction.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001269
// Instruction visitor that emits x86-64 code; caches the codegen's assembler.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1275
David Brazdil58282f42016-01-14 12:45:10 +00001276void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001277 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001278 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001279
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001280 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001281 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001282}
1283
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001284static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001285 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001286}
David Srbecky9d8606d2015-04-12 09:35:32 +01001287
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001288static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001289 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001290}
1291
// Emits the method prologue: optional stack-overflow probe, callee-save core
// register pushes, frame allocation, XMM spills and (if needed) storing the
// current ArtMethod*, all with matching CFI bookkeeping.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Probe below the stack pointer by the reserved amount: the read faults
    // (implicit check) if the stack would overflow.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated callee-save core registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the remainder of the frame in a single adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill allocated callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1340
// Emits the method epilogue: restores XMM spills, deallocates the frame, pops
// callee-save core registers and returns. CFI state is remembered/restored so
// code after the `ret` keeps the pre-epilogue unwind description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    // Reload callee-save XMM registers (reverse of the prologue spills).
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Deallocate the frame body before popping the core registers.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1371
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001372void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1373 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001374}
1375
// Emits a move between two arbitrary locations (register, XMM register, 32-bit
// stack slot, 64-bit stack slot, or constant), selecting the instruction by
// the destination/source location kinds. Stack-to-stack moves go through TMP.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // Bitwise transfer from XMM to GP register.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        // Float constants fit in 32 bits; load only the low half.
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack 32-bit move via the scratch register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack 64-bit move via the scratch register.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1455
Calin Juravle175dc732015-08-25 15:42:32 +01001456void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1457 DCHECK(location.IsRegister());
1458 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1459}
1460
// The destination type is not needed on x86-64: the Move() dispatch is driven
// entirely by the location kinds.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1465
1466void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1467 if (location.IsRegister()) {
1468 locations->AddTemp(location);
1469 } else {
1470 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1471 }
1472}
1473
David Brazdilfc6a86a2015-06-26 10:33:45 +00001474void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001475 DCHECK(!successor->IsExitBlock());
1476
1477 HBasicBlock* block = got->GetBlock();
1478 HInstruction* previous = got->GetPrevious();
1479
1480 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001481 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001482 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1483 return;
1484 }
1485
1486 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1487 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1488 }
1489 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001490 __ jmp(codegen_->GetLabelOf(successor));
1491 }
1492}
1493
// HGoto needs no operands, so no LocationSummary is allocated.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1497
1498void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1499 HandleGoto(got, got->GetSuccessor());
1500}
1501
// HTryBoundary needs no operands, so no LocationSummary is allocated.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1505
1506void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1507 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1508 if (!successor->IsExitBlock()) {
1509 HandleGoto(try_boundary, successor);
1510 }
1511}
1512
// HExit needs no operands, so no LocationSummary is allocated.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1516
// The exit block generates no code.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1519
Mark Mendell152408f2015-12-31 12:28:50 -05001520template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001521void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001522 LabelType* true_label,
1523 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001524 if (cond->IsFPConditionTrueIfNaN()) {
1525 __ j(kUnordered, true_label);
1526 } else if (cond->IsFPConditionFalseIfNaN()) {
1527 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001528 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001529 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001530}
1531
// Emits the compare that sets the condition codes for `condition`, selecting
// the instruction by operand type: cmpl/cmpq for integers (via the codegen
// helpers), ucomiss/ucomisd for floats/doubles with register, constant-pool
// or stack-slot right-hand sides.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit (and reference) compare.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the literal area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                     right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                     right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1584
1585template<class LabelType>
1586void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1587 LabelType* true_target_in,
1588 LabelType* false_target_in) {
1589 // Generated branching requires both targets to be explicit. If either of the
1590 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1591 LabelType fallthrough_target;
1592 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1593 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1594
1595 // Generate the comparison to set the CC.
1596 GenerateCompareTest(condition);
1597
1598 // Now generate the correct jump(s).
1599 Primitive::Type type = condition->InputAt(0)->GetType();
1600 switch (type) {
1601 case Primitive::kPrimLong: {
1602 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1603 break;
1604 }
1605 case Primitive::kPrimFloat: {
1606 GenerateFPJumps(condition, true_target, false_target);
1607 break;
1608 }
1609 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001610 GenerateFPJumps(condition, true_target, false_target);
1611 break;
1612 }
1613 default:
1614 LOG(FATAL) << "Unexpected condition type " << type;
1615 }
1616
David Brazdil0debae72015-11-12 18:37:00 +00001617 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001618 __ jmp(false_target);
1619 }
David Brazdil0debae72015-11-12 18:37:00 +00001620
1621 if (fallthrough_target.IsLinked()) {
1622 __ Bind(&fallthrough_target);
1623 }
Mark Mendellc4701932015-04-10 13:18:51 -04001624}
1625
David Brazdil0debae72015-11-12 18:37:00 +00001626static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1627 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1628 // are set only strictly before `branch`. We can't use the eflags on long
1629 // conditions if they are materialized due to the complex branching.
1630 return cond->IsCondition() &&
1631 cond->GetNext() == branch &&
1632 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1633}
1634
// Emits the test and conditional branches for `instruction` (HIf/HDeoptimize/
// select-like users). Either target may be nullptr to indicate fallthrough.
// Handles three shapes: constant conditions (unconditional jump or nothing),
// materialized boolean conditions (test against zero, possibly reusing the
// EFLAGS of an adjacent compare), and non-materialized conditions (emit the
// compare inline).
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The EFLAGS from the condition's compare are still valid; branch on them.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1718
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001719void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001720 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1721 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001722 locations->SetInAt(0, Location::Any());
1723 }
1724}
1725
1726void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001727 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1728 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1729 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1730 nullptr : codegen_->GetLabelOf(true_successor);
1731 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1732 nullptr : codegen_->GetLabelOf(false_successor);
1733 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001734}
1735
1736void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1737 LocationSummary* locations = new (GetGraph()->GetArena())
1738 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001739 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001740 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001741 locations->SetInAt(0, Location::Any());
1742 }
1743}
1744
1745void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001746 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001747 GenerateTestAndBranch<Label>(deoptimize,
1748 /* condition_input_index */ 0,
1749 slow_path->GetEntryLabel(),
1750 /* false_target */ nullptr);
1751}
1752
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001753static bool SelectCanUseCMOV(HSelect* select) {
1754 // There are no conditional move instructions for XMMs.
1755 if (Primitive::IsFloatingPointType(select->GetType())) {
1756 return false;
1757 }
1758
1759 // A FP condition doesn't generate the single CC that we need.
1760 HInstruction* condition = select->GetCondition();
1761 if (condition->IsCondition() &&
1762 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1763 return false;
1764 }
1765
1766 // We can generate a CMOV for this Select.
1767 return true;
1768}
1769
David Brazdil74eb1b22015-12-14 11:44:01 +00001770void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1771 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1772 if (Primitive::IsFloatingPointType(select->GetType())) {
1773 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001774 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001775 } else {
1776 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001777 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001778 if (select->InputAt(1)->IsConstant()) {
1779 locations->SetInAt(1, Location::RequiresRegister());
1780 } else {
1781 locations->SetInAt(1, Location::Any());
1782 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001783 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001784 locations->SetInAt(1, Location::Any());
1785 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001786 }
1787 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1788 locations->SetInAt(2, Location::RequiresRegister());
1789 }
1790 locations->SetOut(Location::SameAsFirstInput());
1791}
1792
// Code generation for HSelect: out = condition ? InAt(1) : InAt(0).
// Fast path uses CMOV (integer types only); the general path branches around
// a move. The output register starts out holding the "false" value (input 0).
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    // Default: a boolean in a register is tested against zero, so the CMOV
    // fires when the register is non-zero (kNotEqual).
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted at use site: emit the compare/test here.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // General case: skip the move of the "true" value when the condition is
    // false; the output already holds the "false" value (SameAsFirstInput).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1849
// HNativeDebugInfo markers need no registers; an empty location summary is enough.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

// Emits a single-byte x86 NOP.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1861
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001862void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001863 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001864 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001865 // Handle the long/FP comparisons made in instruction simplification.
1866 switch (cond->InputAt(0)->GetType()) {
1867 case Primitive::kPrimLong:
1868 locations->SetInAt(0, Location::RequiresRegister());
1869 locations->SetInAt(1, Location::Any());
1870 break;
1871 case Primitive::kPrimFloat:
1872 case Primitive::kPrimDouble:
1873 locations->SetInAt(0, Location::RequiresFpuRegister());
1874 locations->SetInAt(1, Location::Any());
1875 break;
1876 default:
1877 locations->SetInAt(0, Location::RequiresRegister());
1878 locations->SetInAt(1, Location::Any());
1879 break;
1880 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001881 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001882 locations->SetOut(Location::RequiresRegister());
1883 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001884}
1885
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001886void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001887 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001888 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001889 }
Mark Mendellc4701932015-04-10 13:18:51 -04001890
1891 LocationSummary* locations = cond->GetLocations();
1892 Location lhs = locations->InAt(0);
1893 Location rhs = locations->InAt(1);
1894 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001895 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001896
1897 switch (cond->InputAt(0)->GetType()) {
1898 default:
1899 // Integer case.
1900
1901 // Clear output register: setcc only sets the low byte.
1902 __ xorl(reg, reg);
1903
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001904 codegen_->GenerateIntCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001905 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001906 return;
1907 case Primitive::kPrimLong:
1908 // Clear output register: setcc only sets the low byte.
1909 __ xorl(reg, reg);
1910
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001911 codegen_->GenerateLongCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001912 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001913 return;
1914 case Primitive::kPrimFloat: {
1915 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1916 if (rhs.IsConstant()) {
1917 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1918 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1919 } else if (rhs.IsStackSlot()) {
1920 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1921 } else {
1922 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1923 }
1924 GenerateFPJumps(cond, &true_label, &false_label);
1925 break;
1926 }
1927 case Primitive::kPrimDouble: {
1928 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1929 if (rhs.IsConstant()) {
1930 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1931 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1932 } else if (rhs.IsDoubleStackSlot()) {
1933 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1934 } else {
1935 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1936 }
1937 GenerateFPJumps(cond, &true_label, &false_label);
1938 break;
1939 }
1940 }
1941
1942 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001943 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001944
Roland Levillain4fa13f62015-07-06 18:11:54 +01001945 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001946 __ Bind(&false_label);
1947 __ xorl(reg, reg);
1948 __ jmp(&done_label);
1949
Roland Levillain4fa13f62015-07-06 18:11:54 +01001950 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001951 __ Bind(&true_label);
1952 __ movl(reg, Immediate(1));
1953 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001954}
1955
// All comparison conditions (signed, unsigned, and equality) share a single
// implementation: HandleCondition picks the locations and emits the
// compare/setcc sequence; the concrete x86-64 condition code is derived from
// cond->GetCondition() there.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2035
// Register allocation for HCompare (the three-way -1/0/1 comparison):
// integral inputs need a general-purpose register, FP inputs an XMM register;
// the second operand can be anywhere. The result always goes to a register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // Output may share a register with an input (no overlap needed).
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2062
// Code generation for HCompare: produces -1 if left < right, 0 if equal,
// 1 if left > right. For FP inputs an unordered result (NaN) goes to the
// branch selected by the compare's gt-bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used for the "less" jump below; signed compares use kLess,
  // FP compares use kBelow because ucomis{s,d} reports "below" via CF.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN: result depends on the bias chosen by the compiler front end.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Fan the flags out into -1/0/1.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2131
// Constants are not materialized eagerly: their location is the constant
// itself, and the value is emitted at each use site (as an immediate or a
// constant-pool literal). Hence every code-generator visitor below is empty.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2182
// HMemoryBarrier uses no registers; the barrier kind decides the exact
// instruction sequence inside GenerateMemoryBarrier.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2190
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002191void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2192 ret->SetLocations(nullptr);
2193}
2194
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002195void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002196 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002197}
2198
// Register allocation for HReturn: pin the return value to the ABI return
// register — RAX for core/reference types, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      // kPrimVoid is handled by HReturnVoid, so it is unexpected here.
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2222
// Code generation for HReturn: the register allocator already placed the
// value in the return register (verified in debug builds), so only the
// frame exit needs to be emitted.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2248
// Maps a return type to its location under the dex calling convention:
// RAX for core/reference types, XMM0 for floating point, nothing for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above is exhaustive over Primitive::Type.
  UNREACHABLE();
}
2270
// The callee's ArtMethod* is always passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2274
// Returns the location of the next argument of the given type under the dex
// calling convention, advancing the visitor's internal cursors:
// gp_index_ counts core registers consumed, float_index_ counts FP registers,
// and stack_index_ counts 32-bit stack slots (two per 64-bit value) so that
// spilled arguments land at the right stack offset.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Spilled: slot offset is based on the slot just consumed.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // In a register: consumes one 64-bit core register.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Spilled: advance the core cursor by two (mirrors the two 32-bit
        // stack slots the long occupies).
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2330
// Unresolved invokes go through a runtime trampoline; the call itself is
// emitted by GenerateInvokeUnresolvedRuntimeCall.
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2341
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002342void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002343 // Explicit clinit checks triggered by static invokes must have been pruned by
2344 // art::PrepareForRegisterAllocation.
2345 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002346
Mark Mendellfb8d2792015-03-31 22:16:59 -04002347 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002348 if (intrinsic.TryDispatch(invoke)) {
2349 return;
2350 }
2351
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002352 HandleInvoke(invoke);
2353}
2354
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002355static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2356 if (invoke->GetLocations()->Intrinsified()) {
2357 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2358 intrinsic.Dispatch(invoke);
2359 return true;
2360 }
2361 return false;
2362}
2363
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002364void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002365 // Explicit clinit checks triggered by static invokes must have been pruned by
2366 // art::PrepareForRegisterAllocation.
2367 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002368
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002369 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2370 return;
2371 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002372
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002373 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002374 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002375 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002376 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002377}
2378
// Builds the generic (non-intrinsified) location summary for any invoke,
// using the x86-64 dex calling convention to place arguments.
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2383
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002384void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002385 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002386 if (intrinsic.TryDispatch(invoke)) {
2387 return;
2388 }
2389
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002390 HandleInvoke(invoke);
2391}
2392
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002393void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002394 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2395 return;
2396 }
2397
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002398 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002399 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002400 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002401}
2402
// Builds locations for an interface call: the generic invoke summary plus one
// extra temp pinned to RAX for the hidden argument (the code generator loads
// the dex method index into it before the call).
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2408
// Emits an interface call: loads the receiver's class, indexes into its IMT
// and calls through the resolved ArtMethod's quick entry point. The hidden
// argument (dex method index, in RAX) lets the IMT conflict trampoline
// disambiguate colliding slots.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. It is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the implicit null check: record its PC.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the byte offset of this interface method's IMT slot.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2454
Roland Levillain88cb1752014-10-20 16:36:47 +01002455void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2456 LocationSummary* locations =
2457 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2458 switch (neg->GetResultType()) {
2459 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002460 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002461 locations->SetInAt(0, Location::RequiresRegister());
2462 locations->SetOut(Location::SameAsFirstInput());
2463 break;
2464
Roland Levillain88cb1752014-10-20 16:36:47 +01002465 case Primitive::kPrimFloat:
2466 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002467 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002468 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002469 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002470 break;
2471
2472 default:
2473 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2474 }
2475}
2476
// Emits arithmetic negation. Integer variants use NEG directly on the in/out
// register; floating-point variants XOR the value with a sign-bit mask loaded
// from the constant area (x86 has no FP negate instruction).
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));  // Locations builder requested in-place negation.
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2520
Roland Levillaindff1f282014-11-05 14:15:05 +00002521void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2522 LocationSummary* locations =
2523 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2524 Primitive::Type result_type = conversion->GetResultType();
2525 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002526 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002527
David Brazdilb2bd1c52015-03-25 11:17:37 +00002528 // The Java language does not allow treating boolean as an integral type but
2529 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002530
Roland Levillaindff1f282014-11-05 14:15:05 +00002531 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002532 case Primitive::kPrimByte:
2533 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002534 case Primitive::kPrimLong:
2535 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002536 case Primitive::kPrimBoolean:
2537 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002538 case Primitive::kPrimShort:
2539 case Primitive::kPrimInt:
2540 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002541 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002542 locations->SetInAt(0, Location::Any());
2543 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2544 break;
2545
2546 default:
2547 LOG(FATAL) << "Unexpected type conversion from " << input_type
2548 << " to " << result_type;
2549 }
2550 break;
2551
Roland Levillain01a8d712014-11-14 16:27:39 +00002552 case Primitive::kPrimShort:
2553 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002554 case Primitive::kPrimLong:
2555 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002556 case Primitive::kPrimBoolean:
2557 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002558 case Primitive::kPrimByte:
2559 case Primitive::kPrimInt:
2560 case Primitive::kPrimChar:
2561 // Processing a Dex `int-to-short' instruction.
2562 locations->SetInAt(0, Location::Any());
2563 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2564 break;
2565
2566 default:
2567 LOG(FATAL) << "Unexpected type conversion from " << input_type
2568 << " to " << result_type;
2569 }
2570 break;
2571
Roland Levillain946e1432014-11-11 17:35:19 +00002572 case Primitive::kPrimInt:
2573 switch (input_type) {
2574 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002575 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002576 locations->SetInAt(0, Location::Any());
2577 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2578 break;
2579
2580 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002581 // Processing a Dex `float-to-int' instruction.
2582 locations->SetInAt(0, Location::RequiresFpuRegister());
2583 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002584 break;
2585
Roland Levillain946e1432014-11-11 17:35:19 +00002586 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002587 // Processing a Dex `double-to-int' instruction.
2588 locations->SetInAt(0, Location::RequiresFpuRegister());
2589 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002590 break;
2591
2592 default:
2593 LOG(FATAL) << "Unexpected type conversion from " << input_type
2594 << " to " << result_type;
2595 }
2596 break;
2597
Roland Levillaindff1f282014-11-05 14:15:05 +00002598 case Primitive::kPrimLong:
2599 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002600 case Primitive::kPrimBoolean:
2601 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002602 case Primitive::kPrimByte:
2603 case Primitive::kPrimShort:
2604 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002605 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002606 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002607 // TODO: We would benefit from a (to-be-implemented)
2608 // Location::RegisterOrStackSlot requirement for this input.
2609 locations->SetInAt(0, Location::RequiresRegister());
2610 locations->SetOut(Location::RequiresRegister());
2611 break;
2612
2613 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002614 // Processing a Dex `float-to-long' instruction.
2615 locations->SetInAt(0, Location::RequiresFpuRegister());
2616 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002617 break;
2618
Roland Levillaindff1f282014-11-05 14:15:05 +00002619 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002620 // Processing a Dex `double-to-long' instruction.
2621 locations->SetInAt(0, Location::RequiresFpuRegister());
2622 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002623 break;
2624
2625 default:
2626 LOG(FATAL) << "Unexpected type conversion from " << input_type
2627 << " to " << result_type;
2628 }
2629 break;
2630
Roland Levillain981e4542014-11-14 11:47:14 +00002631 case Primitive::kPrimChar:
2632 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002633 case Primitive::kPrimLong:
2634 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002635 case Primitive::kPrimBoolean:
2636 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002637 case Primitive::kPrimByte:
2638 case Primitive::kPrimShort:
2639 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002640 // Processing a Dex `int-to-char' instruction.
2641 locations->SetInAt(0, Location::Any());
2642 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2643 break;
2644
2645 default:
2646 LOG(FATAL) << "Unexpected type conversion from " << input_type
2647 << " to " << result_type;
2648 }
2649 break;
2650
Roland Levillaindff1f282014-11-05 14:15:05 +00002651 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002652 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002653 case Primitive::kPrimBoolean:
2654 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002655 case Primitive::kPrimByte:
2656 case Primitive::kPrimShort:
2657 case Primitive::kPrimInt:
2658 case Primitive::kPrimChar:
2659 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002660 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002661 locations->SetOut(Location::RequiresFpuRegister());
2662 break;
2663
2664 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002665 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002666 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002667 locations->SetOut(Location::RequiresFpuRegister());
2668 break;
2669
Roland Levillaincff13742014-11-17 14:32:17 +00002670 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002671 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002672 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002673 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002674 break;
2675
2676 default:
2677 LOG(FATAL) << "Unexpected type conversion from " << input_type
2678 << " to " << result_type;
2679 };
2680 break;
2681
Roland Levillaindff1f282014-11-05 14:15:05 +00002682 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002683 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002684 case Primitive::kPrimBoolean:
2685 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002686 case Primitive::kPrimByte:
2687 case Primitive::kPrimShort:
2688 case Primitive::kPrimInt:
2689 case Primitive::kPrimChar:
2690 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002691 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002692 locations->SetOut(Location::RequiresFpuRegister());
2693 break;
2694
2695 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002696 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002697 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002698 locations->SetOut(Location::RequiresFpuRegister());
2699 break;
2700
Roland Levillaincff13742014-11-17 14:32:17 +00002701 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002702 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002703 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002704 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002705 break;
2706
2707 default:
2708 LOG(FATAL) << "Unexpected type conversion from " << input_type
2709 << " to " << result_type;
2710 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002711 break;
2712
2713 default:
2714 LOG(FATAL) << "Unexpected type conversion from " << input_type
2715 << " to " << result_type;
2716 }
2717}
2718
2719void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2720 LocationSummary* locations = conversion->GetLocations();
2721 Location out = locations->Out();
2722 Location in = locations->InAt(0);
2723 Primitive::Type result_type = conversion->GetResultType();
2724 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002725 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002726 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002727 case Primitive::kPrimByte:
2728 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002729 case Primitive::kPrimLong:
2730 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002731 case Primitive::kPrimBoolean:
2732 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002733 case Primitive::kPrimShort:
2734 case Primitive::kPrimInt:
2735 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002736 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002737 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002738 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002739 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002740 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002741 Address(CpuRegister(RSP), in.GetStackIndex()));
2742 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002743 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002744 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002745 }
2746 break;
2747
2748 default:
2749 LOG(FATAL) << "Unexpected type conversion from " << input_type
2750 << " to " << result_type;
2751 }
2752 break;
2753
Roland Levillain01a8d712014-11-14 16:27:39 +00002754 case Primitive::kPrimShort:
2755 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002756 case Primitive::kPrimLong:
2757 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002758 case Primitive::kPrimBoolean:
2759 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002760 case Primitive::kPrimByte:
2761 case Primitive::kPrimInt:
2762 case Primitive::kPrimChar:
2763 // Processing a Dex `int-to-short' instruction.
2764 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002765 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002766 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002767 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002768 Address(CpuRegister(RSP), in.GetStackIndex()));
2769 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002770 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002771 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002772 }
2773 break;
2774
2775 default:
2776 LOG(FATAL) << "Unexpected type conversion from " << input_type
2777 << " to " << result_type;
2778 }
2779 break;
2780
Roland Levillain946e1432014-11-11 17:35:19 +00002781 case Primitive::kPrimInt:
2782 switch (input_type) {
2783 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002784 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002785 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002786 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002787 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002788 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002789 Address(CpuRegister(RSP), in.GetStackIndex()));
2790 } else {
2791 DCHECK(in.IsConstant());
2792 DCHECK(in.GetConstant()->IsLongConstant());
2793 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002794 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002795 }
2796 break;
2797
Roland Levillain3f8f9362014-12-02 17:45:01 +00002798 case Primitive::kPrimFloat: {
2799 // Processing a Dex `float-to-int' instruction.
2800 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2801 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002802 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002803
2804 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002805 // if input >= (float)INT_MAX goto done
2806 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002807 __ j(kAboveEqual, &done);
2808 // if input == NaN goto nan
2809 __ j(kUnordered, &nan);
2810 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002811 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002812 __ jmp(&done);
2813 __ Bind(&nan);
2814 // output = 0
2815 __ xorl(output, output);
2816 __ Bind(&done);
2817 break;
2818 }
2819
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002820 case Primitive::kPrimDouble: {
2821 // Processing a Dex `double-to-int' instruction.
2822 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2823 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002824 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002825
2826 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002827 // if input >= (double)INT_MAX goto done
2828 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002829 __ j(kAboveEqual, &done);
2830 // if input == NaN goto nan
2831 __ j(kUnordered, &nan);
2832 // output = double-to-int-truncate(input)
2833 __ cvttsd2si(output, input);
2834 __ jmp(&done);
2835 __ Bind(&nan);
2836 // output = 0
2837 __ xorl(output, output);
2838 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002839 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002840 }
Roland Levillain946e1432014-11-11 17:35:19 +00002841
2842 default:
2843 LOG(FATAL) << "Unexpected type conversion from " << input_type
2844 << " to " << result_type;
2845 }
2846 break;
2847
Roland Levillaindff1f282014-11-05 14:15:05 +00002848 case Primitive::kPrimLong:
2849 switch (input_type) {
2850 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002851 case Primitive::kPrimBoolean:
2852 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002853 case Primitive::kPrimByte:
2854 case Primitive::kPrimShort:
2855 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002856 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002857 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002858 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002859 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002860 break;
2861
Roland Levillain624279f2014-12-04 11:54:28 +00002862 case Primitive::kPrimFloat: {
2863 // Processing a Dex `float-to-long' instruction.
2864 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2865 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002866 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002867
Mark Mendell92e83bf2015-05-07 11:25:03 -04002868 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002869 // if input >= (float)LONG_MAX goto done
2870 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002871 __ j(kAboveEqual, &done);
2872 // if input == NaN goto nan
2873 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002874 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002875 __ cvttss2si(output, input, true);
2876 __ jmp(&done);
2877 __ Bind(&nan);
2878 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002879 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002880 __ Bind(&done);
2881 break;
2882 }
2883
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002884 case Primitive::kPrimDouble: {
2885 // Processing a Dex `double-to-long' instruction.
2886 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2887 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002888 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002889
Mark Mendell92e83bf2015-05-07 11:25:03 -04002890 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002891 // if input >= (double)LONG_MAX goto done
2892 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002893 __ j(kAboveEqual, &done);
2894 // if input == NaN goto nan
2895 __ j(kUnordered, &nan);
2896 // output = double-to-long-truncate(input)
2897 __ cvttsd2si(output, input, true);
2898 __ jmp(&done);
2899 __ Bind(&nan);
2900 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002901 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002902 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002903 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002904 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002905
2906 default:
2907 LOG(FATAL) << "Unexpected type conversion from " << input_type
2908 << " to " << result_type;
2909 }
2910 break;
2911
Roland Levillain981e4542014-11-14 11:47:14 +00002912 case Primitive::kPrimChar:
2913 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002914 case Primitive::kPrimLong:
2915 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002916 case Primitive::kPrimBoolean:
2917 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002918 case Primitive::kPrimByte:
2919 case Primitive::kPrimShort:
2920 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002921 // Processing a Dex `int-to-char' instruction.
2922 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002923 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002924 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002925 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002926 Address(CpuRegister(RSP), in.GetStackIndex()));
2927 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002928 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002929 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002930 }
2931 break;
2932
2933 default:
2934 LOG(FATAL) << "Unexpected type conversion from " << input_type
2935 << " to " << result_type;
2936 }
2937 break;
2938
Roland Levillaindff1f282014-11-05 14:15:05 +00002939 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002940 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002941 case Primitive::kPrimBoolean:
2942 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002943 case Primitive::kPrimByte:
2944 case Primitive::kPrimShort:
2945 case Primitive::kPrimInt:
2946 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002947 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002948 if (in.IsRegister()) {
2949 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2950 } else if (in.IsConstant()) {
2951 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2952 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002953 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002954 } else {
2955 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2956 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2957 }
Roland Levillaincff13742014-11-17 14:32:17 +00002958 break;
2959
2960 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002961 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002962 if (in.IsRegister()) {
2963 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2964 } else if (in.IsConstant()) {
2965 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2966 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002967 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002968 } else {
2969 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2970 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2971 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002972 break;
2973
Roland Levillaincff13742014-11-17 14:32:17 +00002974 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002975 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002976 if (in.IsFpuRegister()) {
2977 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2978 } else if (in.IsConstant()) {
2979 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2980 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002981 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002982 } else {
2983 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2984 Address(CpuRegister(RSP), in.GetStackIndex()));
2985 }
Roland Levillaincff13742014-11-17 14:32:17 +00002986 break;
2987
2988 default:
2989 LOG(FATAL) << "Unexpected type conversion from " << input_type
2990 << " to " << result_type;
2991 };
2992 break;
2993
Roland Levillaindff1f282014-11-05 14:15:05 +00002994 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002995 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002996 case Primitive::kPrimBoolean:
2997 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002998 case Primitive::kPrimByte:
2999 case Primitive::kPrimShort:
3000 case Primitive::kPrimInt:
3001 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00003002 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003003 if (in.IsRegister()) {
3004 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3005 } else if (in.IsConstant()) {
3006 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3007 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003008 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003009 } else {
3010 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3011 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3012 }
Roland Levillaincff13742014-11-17 14:32:17 +00003013 break;
3014
3015 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003016 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003017 if (in.IsRegister()) {
3018 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3019 } else if (in.IsConstant()) {
3020 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3021 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003022 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003023 } else {
3024 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3025 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3026 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003027 break;
3028
Roland Levillaincff13742014-11-17 14:32:17 +00003029 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003030 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003031 if (in.IsFpuRegister()) {
3032 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3033 } else if (in.IsConstant()) {
3034 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3035 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003036 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003037 } else {
3038 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3039 Address(CpuRegister(RSP), in.GetStackIndex()));
3040 }
Roland Levillaincff13742014-11-17 14:32:17 +00003041 break;
3042
3043 default:
3044 LOG(FATAL) << "Unexpected type conversion from " << input_type
3045 << " to " << result_type;
3046 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003047 break;
3048
3049 default:
3050 LOG(FATAL) << "Unexpected type conversion from " << input_type
3051 << " to " << result_type;
3052 }
3053}
3054
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003055void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003056 LocationSummary* locations =
3057 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003058 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003059 case Primitive::kPrimInt: {
3060 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003061 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3062 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003063 break;
3064 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003065
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003066 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003067 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003068 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003069 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003070 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003071 break;
3072 }
3073
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003074 case Primitive::kPrimDouble:
3075 case Primitive::kPrimFloat: {
3076 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003077 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003078 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003079 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003080 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003081
3082 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003083 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003084 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003085}
3086
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  // Emits the x86-64 code for an HAdd, honoring the constraints registered in
  // LocationsBuilderX86_64::VisitAdd.
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // Output aliases the first input: plain two-operand add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Output aliases the second input: addition commutes, add the first in.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Output is a third register: leal computes first + second without
          // clobbering either input.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // leal with a displacement adds the constant into a fresh register.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Second operand lives on the stack; addl can take a memory operand,
        // but only in the two-operand (out == first) form.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Same structure as the int case, using the 64-bit forms.
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The locations builder restricted constants to those fitting in an
        // imm32 (RegisterOrInt32Constant), verified by the DCHECK below.
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // For FP, the output always aliases the first input (SameAsFirstInput).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from the constant area.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3178
3179void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003180 LocationSummary* locations =
3181 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003182 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003183 case Primitive::kPrimInt: {
3184 locations->SetInAt(0, Location::RequiresRegister());
3185 locations->SetInAt(1, Location::Any());
3186 locations->SetOut(Location::SameAsFirstInput());
3187 break;
3188 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003189 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003190 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003191 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003192 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003193 break;
3194 }
Calin Juravle11351682014-10-23 15:38:15 +01003195 case Primitive::kPrimFloat:
3196 case Primitive::kPrimDouble: {
3197 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003198 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003199 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003200 break;
Calin Juravle11351682014-10-23 15:38:15 +01003201 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003202 default:
Calin Juravle11351682014-10-23 15:38:15 +01003203 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003204 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003205}
3206
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  // Emits the x86-64 code for an HSub. Subtraction is not commutative, so
  // unlike VisitAdd the output always aliases the first input (checked below)
  // and a two-operand sub is used in every case.
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        // Second operand was spilled; subl can take a memory operand.
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        // The locations builder only allows constants fitting in an imm32
        // (RegisterOrInt32Constant).
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from the constant area.
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3269
Calin Juravle34bacdf2014-10-07 20:23:36 +01003270void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3271 LocationSummary* locations =
3272 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3273 switch (mul->GetResultType()) {
3274 case Primitive::kPrimInt: {
3275 locations->SetInAt(0, Location::RequiresRegister());
3276 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003277 if (mul->InputAt(1)->IsIntConstant()) {
3278 // Can use 3 operand multiply.
3279 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3280 } else {
3281 locations->SetOut(Location::SameAsFirstInput());
3282 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003283 break;
3284 }
3285 case Primitive::kPrimLong: {
3286 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003287 locations->SetInAt(1, Location::Any());
3288 if (mul->InputAt(1)->IsLongConstant() &&
3289 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003290 // Can use 3 operand multiply.
3291 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3292 } else {
3293 locations->SetOut(Location::SameAsFirstInput());
3294 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003295 break;
3296 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003297 case Primitive::kPrimFloat:
3298 case Primitive::kPrimDouble: {
3299 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003300 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003301 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003302 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003303 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003304
3305 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003306 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003307 }
3308}
3309
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  // Emits the x86-64 code for an HMul, honoring the constraints registered in
  // LocationsBuilderX86_64::VisitMul.
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand form: out = first * imm; no aliasing requirement.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        // Second operand was spilled; imull can take a memory operand.
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Constant fits an imm32: use the three-operand form.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // For FP the output always aliases the first input (SameAsFirstInput).
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from the constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3393
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003394void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3395 uint32_t stack_adjustment, bool is_float) {
3396 if (source.IsStackSlot()) {
3397 DCHECK(is_float);
3398 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3399 } else if (source.IsDoubleStackSlot()) {
3400 DCHECK(!is_float);
3401 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3402 } else {
3403 // Write the value to the temporary location on the stack and load to FP stack.
3404 if (is_float) {
3405 Location stack_temp = Location::StackSlot(temp_offset);
3406 codegen_->Move(stack_temp, source);
3407 __ flds(Address(CpuRegister(RSP), temp_offset));
3408 } else {
3409 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3410 codegen_->Move(stack_temp, source);
3411 __ fldl(Address(CpuRegister(RSP), temp_offset));
3412 }
3413 }
3414}
3415
// Computes a floating-point remainder with the legacy x87 FPREM instruction.
// Both operands are spilled into a scratch area below RSP, pushed onto the
// x87 stack, and reduced by FPREM in a loop: FPREM performs only a partial
// reduction and signals completion via the C2 status flag (see the
// kC2ConditionMask test below). The result is then moved back into the XMM
// output register and the scratch area is released.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // FPREM computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3468
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003469void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3470 DCHECK(instruction->IsDiv() || instruction->IsRem());
3471
3472 LocationSummary* locations = instruction->GetLocations();
3473 Location second = locations->InAt(1);
3474 DCHECK(second.IsConstant());
3475
3476 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3477 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003478 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003479
3480 DCHECK(imm == 1 || imm == -1);
3481
3482 switch (instruction->GetResultType()) {
3483 case Primitive::kPrimInt: {
3484 if (instruction->IsRem()) {
3485 __ xorl(output_register, output_register);
3486 } else {
3487 __ movl(output_register, input_register);
3488 if (imm == -1) {
3489 __ negl(output_register);
3490 }
3491 }
3492 break;
3493 }
3494
3495 case Primitive::kPrimLong: {
3496 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003497 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003498 } else {
3499 __ movq(output_register, input_register);
3500 if (imm == -1) {
3501 __ negq(output_register);
3502 }
3503 }
3504 break;
3505 }
3506
3507 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003508 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003509 }
3510}
3511
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003512void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003513 LocationSummary* locations = instruction->GetLocations();
3514 Location second = locations->InAt(1);
3515
3516 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3517 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3518
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003519 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003520 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3521 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003522
3523 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3524
3525 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003526 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003527 __ testl(numerator, numerator);
3528 __ cmov(kGreaterEqual, tmp, numerator);
3529 int shift = CTZ(imm);
3530 __ sarl(tmp, Immediate(shift));
3531
3532 if (imm < 0) {
3533 __ negl(tmp);
3534 }
3535
3536 __ movl(output_register, tmp);
3537 } else {
3538 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3539 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3540
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003541 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003542 __ addq(rdx, numerator);
3543 __ testq(numerator, numerator);
3544 __ cmov(kGreaterEqual, rdx, numerator);
3545 int shift = CTZ(imm);
3546 __ sarq(rdx, Immediate(shift));
3547
3548 if (imm < 0) {
3549 __ negq(rdx);
3550 }
3551
3552 __ movq(output_register, rdx);
3553 }
3554}
3555
// Generates code for an integral div/rem by an arbitrary constant divisor
// (|imm| >= 2) using multiplication by a "magic" fixed-point reciprocal
// (Granlund & Montgomery; Hacker's Delight ch. 10):
//   q = hi_half(magic * numerator) [+/- numerator correction] >> shift, then
//   q += (q < 0) to round toward zero. Rem is derived as numerator - q * imm.
// Register constraints (checked below): numerator enters in RAX, the result
// leaves in RAX for div and RDX for rem.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Div uses GetTemp(0) for RDX, so its numerator copy lives in GetTemp(1);
  // rem outputs in RDX and only needs GetTemp(0) for the numerator copy.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator; one-operand imull below clobbers EDX:EAX.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correct the high half for the sign mismatch between imm and magic.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX (= out for rem).
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      // Quotient lands in EAX (= out for div).
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // 64-bit immediates are not encodable; fall back to a RIP-relative literal.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3666
Calin Juravlebacfec32014-11-14 15:54:36 +00003667void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3668 DCHECK(instruction->IsDiv() || instruction->IsRem());
3669 Primitive::Type type = instruction->GetResultType();
3670 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3671
3672 bool is_div = instruction->IsDiv();
3673 LocationSummary* locations = instruction->GetLocations();
3674
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003675 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3676 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003677
Roland Levillain271ab9c2014-11-27 15:23:57 +00003678 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003679 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003680
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003681 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003682 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003683
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003684 if (imm == 0) {
3685 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3686 } else if (imm == 1 || imm == -1) {
3687 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003688 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003689 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003690 } else {
3691 DCHECK(imm <= -2 || imm >= 2);
3692 GenerateDivRemWithAnyConstant(instruction);
3693 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003694 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003695 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003696 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003697 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003698 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003699
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003700 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3701 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3702 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3703 // so it's safe to just use negl instead of more complex comparisons.
3704 if (type == Primitive::kPrimInt) {
3705 __ cmpl(second_reg, Immediate(-1));
3706 __ j(kEqual, slow_path->GetEntryLabel());
3707 // edx:eax <- sign-extended of eax
3708 __ cdq();
3709 // eax = quotient, edx = remainder
3710 __ idivl(second_reg);
3711 } else {
3712 __ cmpq(second_reg, Immediate(-1));
3713 __ j(kEqual, slow_path->GetEntryLabel());
3714 // rdx:rax <- sign-extended of rax
3715 __ cqo();
3716 // rax = quotient, rdx = remainder
3717 __ idivq(second_reg);
3718 }
3719 __ Bind(slow_path->GetExitLabel());
3720 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003721}
3722
Calin Juravle7c4954d2014-10-28 16:57:40 +00003723void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3724 LocationSummary* locations =
3725 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3726 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003727 case Primitive::kPrimInt:
3728 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003729 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003730 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003731 locations->SetOut(Location::SameAsFirstInput());
3732 // Intel uses edx:eax as the dividend.
3733 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003734 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3735 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3736 // output and request another temp.
3737 if (div->InputAt(1)->IsConstant()) {
3738 locations->AddTemp(Location::RequiresRegister());
3739 }
Calin Juravled0d48522014-11-04 16:40:20 +00003740 break;
3741 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003742
Calin Juravle7c4954d2014-10-28 16:57:40 +00003743 case Primitive::kPrimFloat:
3744 case Primitive::kPrimDouble: {
3745 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003746 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003747 locations->SetOut(Location::SameAsFirstInput());
3748 break;
3749 }
3750
3751 default:
3752 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3753 }
3754}
3755
// Emits code for HDiv: integral types go through the shared div/rem helper;
// floating-point types emit divss/divsd against whatever form the second
// operand was allocated to (register, constant literal, or stack slot).
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-operand SSE division overwrites its first operand, so out == in(0).
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: divide by a RIP-relative in-memory literal.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3804
Calin Juravlebacfec32014-11-14 15:54:36 +00003805void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003806 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003807 LocationSummary* locations =
3808 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003809
3810 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003811 case Primitive::kPrimInt:
3812 case Primitive::kPrimLong: {
3813 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003814 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003815 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3816 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003817 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3818 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3819 // output and request another temp.
3820 if (rem->InputAt(1)->IsConstant()) {
3821 locations->AddTemp(Location::RequiresRegister());
3822 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003823 break;
3824 }
3825
3826 case Primitive::kPrimFloat:
3827 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003828 locations->SetInAt(0, Location::Any());
3829 locations->SetInAt(1, Location::Any());
3830 locations->SetOut(Location::RequiresFpuRegister());
3831 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003832 break;
3833 }
3834
3835 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003836 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003837 }
3838}
3839
3840void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3841 Primitive::Type type = rem->GetResultType();
3842 switch (type) {
3843 case Primitive::kPrimInt:
3844 case Primitive::kPrimLong: {
3845 GenerateDivRemIntegral(rem);
3846 break;
3847 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003848 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003849 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003850 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003851 break;
3852 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003853 default:
3854 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3855 }
3856}
3857
Calin Juravled0d48522014-11-04 16:40:20 +00003858void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003859 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003860 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00003861}
3862
// Emits the divisor zero test for HDivZeroCheck, branching to a throwing slow
// path when the value is zero. Constant divisors are resolved at compile time:
// zero becomes an unconditional jump, non-zero emits nothing.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // Sub-int integral types share the 32-bit test.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        // A constant divisor either always throws (0) or never does.
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3910
Calin Juravle9aec02f2014-11-18 23:06:35 +00003911void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3912 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3913
3914 LocationSummary* locations =
3915 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3916
3917 switch (op->GetResultType()) {
3918 case Primitive::kPrimInt:
3919 case Primitive::kPrimLong: {
3920 locations->SetInAt(0, Location::RequiresRegister());
3921 // The shift count needs to be in CL.
3922 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3923 locations->SetOut(Location::SameAsFirstInput());
3924 break;
3925 }
3926 default:
3927 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3928 }
3929}
3930
// Emits code shared by HShl, HShr and HUShr. Register shift counts are in CL
// (guaranteed by HandleShift's locations); constant counts are masked to the
// type's maximum shift distance so only the low bits of the count are used.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        // shl = logical left, sar = arithmetic right, shr = logical right.
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3988
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003989void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3990 LocationSummary* locations =
3991 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3992
3993 switch (ror->GetResultType()) {
3994 case Primitive::kPrimInt:
3995 case Primitive::kPrimLong: {
3996 locations->SetInAt(0, Location::RequiresRegister());
3997 // The shift count needs to be in CL (unless it is a constant).
3998 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3999 locations->SetOut(Location::SameAsFirstInput());
4000 break;
4001 }
4002 default:
4003 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4004 UNREACHABLE();
4005 }
4006}
4007
// Emits rorl/rorq for HRor. Register rotate counts are in CL (per VisitRor's
// locations); constant counts are masked to the type's maximum shift distance.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4037
// All shift kinds share the same register constraints; delegate to HandleShift.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4041
// Code generation for all shift kinds is shared in HandleShift.
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4045
// All shift kinds share the same register constraints; delegate to HandleShift.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4049
// Code generation for all shift kinds is shared in HandleShift.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4053
// All shift kinds share the same register constraints; delegate to HandleShift.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4057
// Code generation for all shift kinds is shared in HandleShift.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4061
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004062void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004063 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004064 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004065 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004066 if (instruction->IsStringAlloc()) {
4067 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4068 } else {
4069 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4070 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
4071 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004072 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004073}
4074
// Emits the runtime call for HNewInstance. String allocations are routed to
// the StringFactory's NewEmptyString entry point; everything else uses the
// instruction's generic allocation entry point.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    // Load the entry method via a GS-relative (thread-local) absolute address.
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4091
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004092void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
4093 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004094 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004095 InvokeRuntimeCallingConvention calling_convention;
4096 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004097 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004098 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004099 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004100}
4101
// Emits the runtime call for HNewArray: loads the type index into the first
// argument register, then invokes the instruction's allocation entry point.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
4113
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004114void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004115 LocationSummary* locations =
4116 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004117 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4118 if (location.IsStackSlot()) {
4119 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4120 } else if (location.IsDoubleStackSlot()) {
4121 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4122 }
4123 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004124}
4125
// No code is emitted: the locations builder already mapped the parameter to
// the register or caller-frame stack slot where the calling convention put it.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4130
4131void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4132 LocationSummary* locations =
4133 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4134 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4135}
4136
// No code is emitted: the locations builder already mapped the value to the
// fixed method register.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4141
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004142void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4143 LocationSummary* locations =
4144 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4145 locations->SetInAt(0, Location::RequiresRegister());
4146 locations->SetOut(Location::RequiresRegister());
4147}
4148
// Loads a method pointer out of a class's dispatch tables. The vtable case is
// a single load from the class's embedded vtable; the IMT case is two loads
// (class -> IMT pointer -> IMT slot).
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // First load the IMT pointer from the class...
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // ...then the method entry from the table, reusing the output register.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4166
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004167void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004168 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004169 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004170 locations->SetInAt(0, Location::RequiresRegister());
4171 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004172}
4173
// Emits a bitwise not (notl/notq) in place on the input/output register.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  // The locations builder requested out == in(0); verify that here.
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ notl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      __ notq(out.AsRegister<CpuRegister>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
4192
David Brazdil66d126e2015-04-03 16:02:44 +01004193void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4194 LocationSummary* locations =
4195 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4196 locations->SetInAt(0, Location::RequiresRegister());
4197 locations->SetOut(Location::SameAsFirstInput());
4198}
4199
4200void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004201 LocationSummary* locations = bool_not->GetLocations();
4202 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4203 locations->Out().AsRegister<CpuRegister>().AsRegister());
4204 Location out = locations->Out();
4205 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4206}
4207
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004208void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004209 LocationSummary* locations =
4210 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004211 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004212 locations->SetInAt(i, Location::Any());
4213 }
4214 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004215}
4216
// Phi instructions are not expected to reach instruction code generation;
// hitting this visitor indicates a compiler bug.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4220
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004221void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004222 /*
4223 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004224 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004225 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4226 */
4227 switch (kind) {
4228 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004229 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004230 break;
4231 }
4232 case MemBarrierKind::kAnyStore:
4233 case MemBarrierKind::kLoadAny:
4234 case MemBarrierKind::kStoreStore: {
4235 // nop
4236 break;
4237 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004238 case MemBarrierKind::kNTStoreStore:
4239 // Non-Temporal Store/Store needs an explicit fence.
4240 MemoryFence(/* non-temporal */ true);
4241 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004242 }
4243}
4244
4245void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4246 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4247
Roland Levillain0d5a2812015-11-13 10:07:31 +00004248 bool object_field_get_with_read_barrier =
4249 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004250 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004251 new (GetGraph()->GetArena()) LocationSummary(instruction,
4252 object_field_get_with_read_barrier ?
4253 LocationSummary::kCallOnSlowPath :
4254 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004255 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004256 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004257 }
Calin Juravle52c48962014-12-16 17:02:57 +00004258 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004259 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4260 locations->SetOut(Location::RequiresFpuRegister());
4261 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004262 // The output overlaps for an object field get when read barriers
4263 // are enabled: we do not want the move to overwrite the object's
4264 // location, as we need it to emit the read barrier.
4265 locations->SetOut(
4266 Location::RequiresRegister(),
4267 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004268 }
Calin Juravle52c48962014-12-16 17:02:57 +00004269}
4270
// Emits the load for an instance/static field get. Handles sign/zero
// extension per type, volatile load ordering, implicit null checks and
// (for references) read barriers. The relative order of the load, the
// MaybeRecordImplicitNullCheck call and the memory barrier is significant:
// the null check must be recorded on the instruction that faults, and the
// LoadAny barrier must follow the load for volatile semantics.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load.
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4369
4370void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4371 const FieldInfo& field_info) {
4372 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4373
4374 LocationSummary* locations =
4375 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004376 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004377 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004378 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004379 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004380
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004381 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004382 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004383 if (is_volatile) {
4384 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4385 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4386 } else {
4387 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4388 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004389 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004390 if (is_volatile) {
4391 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4392 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4393 } else {
4394 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4395 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004396 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004397 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004398 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004399 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004400 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004401 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4402 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004403 locations->AddTemp(Location::RequiresRegister());
4404 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004405}
4406
// Emits the store for an instance/static field set. Sequencing matters:
// for volatile fields an AnyStore barrier precedes and an AnyAny barrier
// follows the store; the implicit null check must be recorded on the
// instruction that actually faults (MoveInt64ToAddress records it itself,
// hence the maybe_record_implicit_null_check_done flag); the GC card mark
// runs after the store. `value_can_be_null` is forwarded to MarkGCCard.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper below already recorded the implicit null check.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temporary so the original register
          // keeps the unpoisoned value for the write barrier below.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // May emit one 64-bit or two 32-bit stores; records the implicit
        // null check itself.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4529
// Location setup for an instance field store; delegates to the shared
// field-set handler.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4533
// Code generation for an instance field store; forwards the value's
// null-ness hint to the shared field-set handler.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4537
// Location setup for an instance field load; delegates to the shared
// field-get handler.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4541
// Code generation for an instance field load; delegates to the shared
// field-get handler.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004545
// Location setup for a static field load; delegates to the shared
// field-get handler.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004549
// Code generation for a static field load; delegates to the shared
// field-get handler.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004553
// Location setup for a static field store; delegates to the shared
// field-set handler.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004557
// Code generation for a static field store; forwards the value's
// null-ness hint to the shared field-set handler.
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4561
// Location setup for a get of a field that could not be resolved at compile
// time: uses the x86-64 field-access calling convention, since the access is
// performed through a shared helper.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4568
// Code generation for an unresolved instance field get: delegates to the
// shared unresolved-field access helper with the field's type, index and
// dex pc, under the x86-64 field-access calling convention.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4578
// Location setup for a set of an unresolved instance field; mirrors the
// unresolved-get path using the shared helper and calling convention.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4585
// Code generation for an unresolved instance field set; delegates to the
// shared unresolved-field access helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4595
// Location setup for a get of an unresolved static field; uses the shared
// unresolved-field helper and the x86-64 field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4602
// Code generation for an unresolved static field get; delegates to the
// shared unresolved-field access helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4612
// Location setup for a set of an unresolved static field; uses the shared
// unresolved-field helper and the x86-64 field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4619
// Code generation for an unresolved static field set; delegates to the
// shared unresolved-field access helper.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4629
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004630void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004631 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4632 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4633 ? Location::RequiresRegister()
4634 : Location::Any();
4635 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004636}
4637
Calin Juravle2ae48182016-03-16 14:05:09 +00004638void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4639 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004640 return;
4641 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004642 LocationSummary* locations = instruction->GetLocations();
4643 Location obj = locations->InAt(0);
4644
4645 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004646 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004647}
4648
Calin Juravle2ae48182016-03-16 14:05:09 +00004649void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004650 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004651 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004652
4653 LocationSummary* locations = instruction->GetLocations();
4654 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004655
4656 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004657 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004658 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004659 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004660 } else {
4661 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004662 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004663 __ jmp(slow_path->GetEntryLabel());
4664 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004665 }
4666 __ j(kEqual, slow_path->GetEntryLabel());
4667}
4668
// Delegates to the code generator, which selects the implicit or explicit
// null-check strategy.
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
4672
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004673void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004674 bool object_array_get_with_read_barrier =
4675 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004676 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004677 new (GetGraph()->GetArena()) LocationSummary(instruction,
4678 object_array_get_with_read_barrier ?
4679 LocationSummary::kCallOnSlowPath :
4680 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004681 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004682 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004683 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004684 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004685 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004686 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4687 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4688 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004689 // The output overlaps for an object array get when read barriers
4690 // are enabled: we do not want the move to overwrite the array's
4691 // location, as we need it to emit the read barrier.
4692 locations->SetOut(
4693 Location::RequiresRegister(),
4694 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004695 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004696}
4697
4698void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4699 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004700 Location obj_loc = locations->InAt(0);
4701 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004702 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004703 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004704 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004705
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004706 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004707 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004708 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004709 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004710 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004711 break;
4712 }
4713
4714 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004715 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004716 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004717 break;
4718 }
4719
4720 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004721 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004722 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004723 break;
4724 }
4725
4726 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004727 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07004728 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
4729 // Branch cases into compressed and uncompressed for each index's type.
4730 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4731 NearLabel done, not_compressed;
4732 __ cmpl(Address(obj, count_offset), Immediate(0));
4733 codegen_->MaybeRecordImplicitNullCheck(instruction);
4734 __ j(kGreaterEqual, &not_compressed);
4735 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
4736 __ jmp(&done);
4737 __ Bind(&not_compressed);
4738 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4739 __ Bind(&done);
4740 } else {
4741 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4742 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004743 break;
4744 }
4745
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004746 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004747 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004748 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004749 break;
4750 }
4751
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004752 case Primitive::kPrimNot: {
4753 static_assert(
4754 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4755 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004756 // /* HeapReference<Object> */ out =
4757 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4758 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004759 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01004760 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004761 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004762 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004763 } else {
4764 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004765 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4766 codegen_->MaybeRecordImplicitNullCheck(instruction);
4767 // If read barriers are enabled, emit read barriers other than
4768 // Baker's using a slow path (and also unpoison the loaded
4769 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004770 if (index.IsConstant()) {
4771 uint32_t offset =
4772 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004773 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4774 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004775 codegen_->MaybeGenerateReadBarrierSlow(
4776 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4777 }
4778 }
4779 break;
4780 }
4781
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004782 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004783 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004784 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004785 break;
4786 }
4787
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004788 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004789 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004790 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004791 break;
4792 }
4793
4794 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004795 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004796 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004797 break;
4798 }
4799
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004800 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004801 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004802 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004803 }
Roland Levillain4d027112015-07-01 15:41:14 +01004804
4805 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004806 // Potential implicit null checks, in the case of reference
4807 // arrays, are handled in the previous switch statement.
4808 } else {
4809 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004810 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004811}
4812
4813void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004814 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004815
4816 bool needs_write_barrier =
4817 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004818 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004819
Nicolas Geoffray39468442014-09-02 15:17:15 +01004820 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004821 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004822 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004823 LocationSummary::kCallOnSlowPath :
4824 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004825
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004826 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004827 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4828 if (Primitive::IsFloatingPointType(value_type)) {
4829 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004830 } else {
4831 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4832 }
4833
4834 if (needs_write_barrier) {
4835 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004836 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004837 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004838 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004839}
4840
// Generates code for an array store (array[index] = value). For
// reference stores this can additionally involve: a direct store of
// null without any barrier, a type check with a runtime slow path
// (ArraySetSlowPathX86_64), heap reference poisoning, and a GC card
// mark (write barrier) via MarkGCCard.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Field offsets used by the reference type check in the kPrimNot case.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // Storing null requires neither a write barrier nor a type check.
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // A null value is stored directly, bypassing the type check.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Store the (possibly poisoned) reference into the array slot.
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit constant may not fit in an immediate; MoveInt64ToAddress
        // handles the split into low/high 32-bit halves if needed.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        // Store the constant's bit pattern as a 32-bit integer immediate.
        DCHECK(value.IsConstant());
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the constant's bit pattern; MoveInt64ToAddress splits it
        // into two 32-bit stores if it does not fit in an immediate.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5043
5044void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005045 LocationSummary* locations =
5046 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005047 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005048 if (!instruction->IsEmittedAtUseSite()) {
5049 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5050 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005051}
5052
5053void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005054 if (instruction->IsEmittedAtUseSite()) {
5055 return;
5056 }
5057
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005058 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005059 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005060 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5061 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005062 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005063 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005064 // Mask out most significant bit in case the array is String's array of char.
5065 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
5066 __ andl(out, Immediate(INT32_MAX));
5067 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005068}
5069
5070void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005071 RegisterSet caller_saves = RegisterSet::Empty();
5072 InvokeRuntimeCallingConvention calling_convention;
5073 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5074 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5075 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005076 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005077 HInstruction* length = instruction->InputAt(1);
5078 if (!length->IsEmittedAtUseSite()) {
5079 locations->SetInAt(1, Location::RegisterOrConstant(length));
5080 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005081}
5082
// Emits the bounds check for an array access: control transfers to
// BoundsCheckSlowPathX86_64 unless `index` is in [0, length). The
// check is resolved statically when both operands are constants, and
// the length is compared straight from the array object's memory when
// the ArrayLength instruction is emitted at its use site.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: jump unconditionally to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // The unsigned comparison also catches negative indices.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Mask out the most significant bit of the count field before the
        // comparison, as it is not part of a compressed String's length.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ andl(length_reg, Immediate(INT32_MAX));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Here the comparison is length vs. index, so the failing (unsigned)
    // condition is length <= index.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5141
// Emits the write barrier's card-marking sequence: dirties the card
// table entry covering `object` after a reference was stored into it.
// `temp` and `card` are scratch registers; `value` is the stored
// reference. When `value_can_be_null` is true, the marking is skipped
// entirely for a null store (no card needs dirtying then).
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the thread-local card table base, addressed via the GS segment.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // Index the table with the object address shifted right by the card
  // size, then store the low byte of `card` (the table base) into that
  // entry. NOTE(review): using the base's low byte as the dirty value
  // relies on the CardTable biasing scheme — see gc/accounting/card_table.h.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5161
// Parallel moves never need a LocationSummary — they are lowered
// directly by the parallel move resolver (see the matching
// InstructionCodeGeneratorX86_64 visitor) — so reaching this builder
// indicates a compiler bug.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5165
// Lowers an HParallelMove by delegating to the parallel move resolver,
// which emits the native code for all of the instruction's moves.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5169
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005170void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005171 LocationSummary* locations =
5172 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005173 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005174}
5175
5176void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005177 HBasicBlock* block = instruction->GetBlock();
5178 if (block->GetLoopInformation() != nullptr) {
5179 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5180 // The back edge will generate the suspend check.
5181 return;
5182 }
5183 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5184 // The goto will generate the suspend check.
5185 return;
5186 }
5187 GenerateSuspendCheck(instruction, nullptr);
5188}
5189
// Emits a suspend check: compares the current thread's flags word
// (read through the GS segment) against zero and enters a
// SuspendCheckSlowPathX86_64 when any flag is set.
//
// With `successor` == nullptr the fast path falls through (the slow
// path returns right behind the check); otherwise the fast path jumps
// to `successor` (a loop header) and the slow path is entered on the
// fall-through.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse a slow path already attached to this instruction, if any.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // A cached slow path must have been built for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5217
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005218X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5219 return codegen_->GetAssembler();
5220}
5221
5222void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005223 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005224 Location source = move->GetSource();
5225 Location destination = move->GetDestination();
5226
5227 if (source.IsRegister()) {
5228 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005229 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005230 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005231 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005232 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005233 } else {
5234 DCHECK(destination.IsDoubleStackSlot());
5235 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005236 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005237 }
5238 } else if (source.IsStackSlot()) {
5239 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005240 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005241 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005242 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005243 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005244 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005245 } else {
5246 DCHECK(destination.IsStackSlot());
5247 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5248 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5249 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005250 } else if (source.IsDoubleStackSlot()) {
5251 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005252 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005253 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005254 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005255 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5256 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005257 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005258 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005259 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5260 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5261 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005262 } else if (source.IsConstant()) {
5263 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005264 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5265 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005266 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005267 if (value == 0) {
5268 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5269 } else {
5270 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5271 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005272 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005273 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005274 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005275 }
5276 } else if (constant->IsLongConstant()) {
5277 int64_t value = constant->AsLongConstant()->GetValue();
5278 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005279 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005280 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005281 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005282 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005283 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005284 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005285 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005286 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005287 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005288 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005289 } else {
5290 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005291 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005292 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5293 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005294 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005295 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005296 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005297 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005298 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005299 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005300 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005301 } else {
5302 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005303 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005304 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005305 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005306 } else if (source.IsFpuRegister()) {
5307 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005308 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005309 } else if (destination.IsStackSlot()) {
5310 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005311 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005312 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005313 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005314 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005315 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005316 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005317 }
5318}
5319
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005320void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005321 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005322 __ movl(Address(CpuRegister(RSP), mem), reg);
5323 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005324}
5325
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005326void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005327 ScratchRegisterScope ensure_scratch(
5328 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5329
5330 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5331 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5332 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5333 Address(CpuRegister(RSP), mem2 + stack_offset));
5334 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5335 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5336 CpuRegister(ensure_scratch.GetRegister()));
5337}
5338
Mark Mendell8a1c7282015-06-29 15:41:28 -04005339void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5340 __ movq(CpuRegister(TMP), reg1);
5341 __ movq(reg1, reg2);
5342 __ movq(reg2, CpuRegister(TMP));
5343}
5344
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005345void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5346 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5347 __ movq(Address(CpuRegister(RSP), mem), reg);
5348 __ movq(reg, CpuRegister(TMP));
5349}
5350
5351void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5352 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005353 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005354
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005355 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5356 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5357 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5358 Address(CpuRegister(RSP), mem2 + stack_offset));
5359 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5360 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5361 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005362}
5363
// Swaps a 32-bit value between an XMM register and a stack slot.
// The old slot value is parked in core register TMP while the XMM value is
// stored, then transferred into the XMM register with movd.
// NOTE(review): relies on this assembler's movd(XmmRegister, CpuRegister)
// zero-filling the upper lanes of `reg` — confirm against the assembler.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5369
// Swaps a 64-bit value between an XMM register and a stack slot.
// The old slot value is parked in core register TMP while the XMM value is
// stored, then transferred into the XMM register.
// NOTE(review): the final transfer uses movd even though 64 bits were
// loaded with movq — this assumes the assembler's movd(XmmRegister,
// CpuRegister) emits a 64-bit move; verify against the assembler.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5375
// Emits code that swaps the contents of the source and destination of the
// move at `index`, dispatching on the (source kind, destination kind) pair
// to the appropriate 32-/64-bit Exchange helper. Unhandled combinations are
// a fatal error.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    // Symmetric to the case above: swap acts the same from either side.
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM swap: stash one value in core TMP, copy the other across,
    // then move the stashed bits into the destination.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5411
5412
5413void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5414 __ pushq(CpuRegister(reg));
5415}
5416
5417
5418void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5419 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005420}
5421
// Emits a check that `class_reg` holds an initialized class: compares the
// class status field against kStatusInitialized and branches to `slow_path`
// (which performs initialization) when the status is lower. The slow path's
// exit label is bound here so it falls back to the straight-line code.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5430
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005431HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5432 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005433 switch (desired_class_load_kind) {
5434 case HLoadClass::LoadKind::kReferrersClass:
5435 break;
5436 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5437 DCHECK(!GetCompilerOptions().GetCompilePic());
5438 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5439 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5440 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5441 DCHECK(GetCompilerOptions().GetCompilePic());
5442 break;
5443 case HLoadClass::LoadKind::kBootImageAddress:
5444 break;
5445 case HLoadClass::LoadKind::kDexCacheAddress:
5446 DCHECK(Runtime::Current()->UseJitCompilation());
5447 break;
5448 case HLoadClass::LoadKind::kDexCachePcRelative:
5449 DCHECK(!Runtime::Current()->UseJitCompilation());
5450 break;
5451 case HLoadClass::LoadKind::kDexCacheViaMethod:
5452 break;
5453 }
5454 return desired_class_load_kind;
5455}
5456
// Builds the register constraints for an HLoadClass.
// - With an access check, a fixed runtime-call summary is created and we
//   are done.
// - Otherwise the call kind depends on whether the load may hit a slow path
//   (environment needed or a read barrier required for a non-boot-image
//   class).
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Location::RegisterLocation(RAX),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  // A Baker read-barrier slow path without an environment preserves all
  // registers itself.
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  // Only kinds that read through the current ArtMethod take a register input.
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}
5484
// Emits the code for an HLoadClass, dispatching on the load kind chosen at
// locations-building time. Kinds that read from the dex cache may produce a
// null result and fall back to a LoadClassSlowPathX86_64; the same slow
// path also performs class initialization when required.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Access-checked loads always go through the runtime.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          requires_read_barrier);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      // RIP-relative lea against a dummy offset that the linker patches.
      DCHECK(!requires_read_barrier);
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!requires_read_barrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // /* GcRoot<mirror::Class> */ out = *address
      if (IsUint<32>(cls->GetAddress())) {
        // Address fits in a 32-bit displacement: dereference it directly.
        Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                address,
                                /* fixup_label */ nullptr,
                                requires_read_barrier);
      } else {
        // 64-bit address: materialize it in `out` first, then load through it.
        // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
        __ movq(out, Immediate(cls->GetAddress()));
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                Address(out, 0),
                                /* fixup_label */ nullptr,
                                requires_read_barrier);
      }
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
          /* fixup_label */ nullptr,
          requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  // Shared slow path for "class not yet resolved" and "class not yet
  // initialized"; only bind the exit label here when the clinit check does
  // not do it itself.
  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5596
5597void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5598 LocationSummary* locations =
5599 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5600 locations->SetInAt(0, Location::RequiresRegister());
5601 if (check->HasUses()) {
5602 locations->SetOut(Location::SameAsFirstInput());
5603 }
5604}
5605
5606void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005607 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005608 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005609 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005610 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005611 GenerateClassInitializationCheck(slow_path,
5612 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005613}
5614
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005615HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5616 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005617 switch (desired_string_load_kind) {
5618 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5619 DCHECK(!GetCompilerOptions().GetCompilePic());
5620 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5621 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5622 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5623 DCHECK(GetCompilerOptions().GetCompilePic());
5624 break;
5625 case HLoadString::LoadKind::kBootImageAddress:
5626 break;
5627 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005628 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005629 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005630 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005631 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005632 break;
5633 case HLoadString::LoadKind::kDexCacheViaMethod:
5634 break;
5635 }
5636 return desired_string_load_kind;
5637}
5638
// Builds the register constraints for an HLoadString.
// kDexCacheViaMethod is a main-path runtime call returning in RAX; other
// kinds produce the result in any register, and kBssEntry may take a slow
// path whose caller-save set depends on the read-barrier configuration.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
          ? LocationSummary::kCallOnMainOnly
          : LocationSummary::kCallOnSlowPath)
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and/or marking to save everything.
        // Custom calling convention: RAX serves as both input and output.
        RegisterSet caller_saves = RegisterSet::Empty();
        caller_saves.Add(Location::RegisterLocation(RAX));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
5663
// Emits the code for an HLoadString. PC-relative and boot-image kinds are
// fully inline (recording linker patches as needed); kBssEntry loads
// through the .bss with a resolution slow path; anything else falls back to
// the pResolveString runtime call.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // RIP-relative lea against a dummy offset that the linker patches.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK_NE(load->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                         /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::Class> */ out = *address  /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier);
      // A null .bss entry means the string is not resolved yet: resolve it
      // on the slow path.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex()));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5707
David Brazdilcb1c0552015-08-04 16:22:25 +01005708static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005709 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005710 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005711}
5712
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005713void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5714 LocationSummary* locations =
5715 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5716 locations->SetOut(Location::RequiresRegister());
5717}
5718
5719void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005720 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5721}
5722
5723void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5724 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5725}
5726
5727void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5728 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005729}
5730
5731void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5732 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005733 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005734 InvokeRuntimeCallingConvention calling_convention;
5735 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5736}
5737
5738void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01005739 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005740 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005741}
5742
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005743static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005744 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
5745 // We need a temporary for holding the iftable length.
5746 return true;
5747 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005748 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005749 !kUseBakerReadBarrier &&
5750 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005751 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5752 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5753}
5754
// Builds the register constraints for an HInstanceOf.
// Simple checks only need a slow path when read barriers are in use;
// array/unresolved/interface checks always may call a slow path.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  // The Baker read-barrier slow path preserves all registers itself.
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  // Some check kinds need a temporary register (e.g. for the iftable
  // length, or for non-Baker read barriers); see TypeCheckNeedsATemporary.
  if (TypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
5789
// Generates code for HInstanceOf: loads `obj`'s class into `out`, compares it
// against `cls` according to the statically determined TypeCheckKind, and
// leaves 1 in `out` on success or 0 on failure. When a null check is required,
// a null `obj` produces 0 via the `zero` label.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  // Extra temporary used by the read-barrier reference loads; NoLocation when
  // the locations builder did not allocate one for this check kind.
  Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  // `zero` and `done` are only bound below if some emitted branch actually
  // targets them (tested via IsLinked()).
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  // /* HeapReference<Class> */ out = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction,
                                    out_loc,
                                    obj_loc,
                                    class_offset,
                                    kEmitCompilerReadBarrier);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // `zero` is linked iff the null check above was emitted; otherwise the
      // result can be materialized branchlessly with setcc.
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      // A primitive component type (!= kPrimNot) means a primitive array: fail.
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // Inline comparison against the exact class; anything else is resolved
      // on the (non-fatal) type check slow path.
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }
  }

  // Failure path: materialize 0 in `out`.
  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5980
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005981void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005982 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5983 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005984 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5985 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005986 case TypeCheckKind::kExactCheck:
5987 case TypeCheckKind::kAbstractClassCheck:
5988 case TypeCheckKind::kClassHierarchyCheck:
5989 case TypeCheckKind::kArrayObjectCheck:
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005990 case TypeCheckKind::kInterfaceCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005991 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5992 LocationSummary::kCallOnSlowPath :
5993 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005994 break;
5995 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005996 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005997 call_kind = LocationSummary::kCallOnSlowPath;
5998 break;
5999 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006000 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6001 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006002 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6003 // Require a register for the interface check since there is a loop that compares the class to
6004 // a memory address.
6005 locations->SetInAt(1, Location::RequiresRegister());
6006 } else {
6007 locations->SetInAt(1, Location::Any());
6008 }
6009
Roland Levillain0d5a2812015-11-13 10:07:31 +00006010 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6011 locations->AddTemp(Location::RequiresRegister());
6012 // When read barriers are enabled, we need an additional temporary
6013 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006014 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006015 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006016 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006017}
6018
// Generates code for HCheckCast: verifies inline that `obj` (when non-null)
// is compatible with `cls`, and jumps to TypeCheckSlowPathX86_64 whenever the
// fast path cannot prove it. The slow path is constructed as fatal
// (non-returning) only when the inline check is exhaustive and the
// instruction cannot throw into a catch block. `temp` is clobbered and is
// reloaded with obj->klass_ before every slow-path entry, as the slow path
// expects the object's class there.
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // Second temporary, only allocated (by the locations builder) for the check
  // kinds that need it; NoLocation otherwise.
  Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const int object_array_data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal =
      (type_check_kind == TypeCheckKind::kExactCheck ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
      !instruction->CanThrowIntoCatchBlock();
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      NearLabel done;
      // A null object passes any checkcast, so fall through to `done`.
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      NearLabel done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, compare_classes;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // to the `compare_classes` label to compare it with the checked
      // class.
      __ testl(temp, temp);
      __ j(kNotEqual, &compare_classes);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      __ jmp(type_check_slow_path->GetEntryLabel());

      __ Bind(&compare_classes);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // A match falls through to `done`; otherwise keep walking up the
      // superclass chain.
      __ j(kNotEqual, &loop);
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      NearLabel done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotEqual, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      __ jmp(type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // We cannot use a NearLabel here, as its range might be too
      // short in some cases when read barriers are enabled. This has
      // been observed for instance when the code emitted for this
      // case uses high x86-64 registers (R8-R15).
      Label done;
      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }

      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      __ j(kNotEqual, &check_non_primitive_component_type);
      // Otherwise, jump to the slow path to throw the exception.
      //
      // But before, move back the object's class into `temp` before
      // going into the slow path, as it has been overwritten in the
      // meantime.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      __ jmp(type_check_slow_path->GetEntryLabel());

      __ Bind(&check_non_primitive_component_type);
      // A primitive component type (!= kPrimNot) means a primitive array:
      // the cast fails, go to the slow path to throw.
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kEqual, &done);
      // Same comment as above regarding `temp` and the slow path.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      __ jmp(type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      NearLabel done;

      // Avoid null check if we know obj is not null.
      if (instruction->MustDoNullCheck()) {
        __ testl(obj, obj);
        __ j(kEqual, &done);
      }


      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.

      // Fast path for the interface check. Since we compare with a memory location in the inner
      // loop we would need to have cls poisoned. However unpoisoning cls would reset the
      // conditional flags and cause the conditional jump to be incorrect.
      if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
        // Try to avoid read barriers to improve the fast path. We can not get false positives by
        // doing this.
        // /* HeapReference<Class> */ temp = obj->klass_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          obj_loc,
                                          class_offset,
                                          /*emit_read_barrier*/ false);

        // /* HeapReference<Class> */ temp = temp->iftable_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          temp_loc,
                                          iftable_offset,
                                          /*emit_read_barrier*/ false);
        NearLabel is_null;
        // Null iftable means it is empty.
        __ testl(temp_loc.AsRegister<CpuRegister>(), temp_loc.AsRegister<CpuRegister>());
        __ j(kZero, &is_null);

        // Loop through the iftable and check if any class matches.
        __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(),
                Address(temp_loc.AsRegister<CpuRegister>(), array_length_offset));

        NearLabel start_loop;
        __ Bind(&start_loop);
        __ cmpl(cls.AsRegister<CpuRegister>(),
                Address(temp_loc.AsRegister<CpuRegister>(), object_array_data_offset));
        __ j(kEqual, &done);  // Return if same class.
        // Go to next interface: iftable entries are (interface, method array)
        // pairs, hence the stride of 2 references.
        __ addq(temp_loc.AsRegister<CpuRegister>(), Immediate(2 * kHeapReferenceSize));
        __ subq(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
        __ j(kNotZero, &start_loop);
        __ Bind(&is_null);
      }

      // Since we clobbered temp_loc holding the class, we need to reload it.
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kEmitCompilerReadBarrier);
      __ jmp(type_check_slow_path->GetEntryLabel());
      __ Bind(&done);
      break;
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
6324
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006325void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6326 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006327 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006328 InvokeRuntimeCallingConvention calling_convention;
6329 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6330}
6331
6332void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006333 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006334 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006335 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006336 if (instruction->IsEnter()) {
6337 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6338 } else {
6339 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6340 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006341}
6342
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006343void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6344void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6345void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6346
6347void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6348 LocationSummary* locations =
6349 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6350 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6351 || instruction->GetResultType() == Primitive::kPrimLong);
6352 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006353 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006354 locations->SetOut(Location::SameAsFirstInput());
6355}
6356
// Code generation for And/Or/Xor is shared in HandleBitwiseOperation below.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6368
6369void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6370 LocationSummary* locations = instruction->GetLocations();
6371 Location first = locations->InAt(0);
6372 Location second = locations->InAt(1);
6373 DCHECK(first.Equals(locations->Out()));
6374
6375 if (instruction->GetResultType() == Primitive::kPrimInt) {
6376 if (second.IsRegister()) {
6377 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006378 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006379 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006380 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006381 } else {
6382 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006383 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006384 }
6385 } else if (second.IsConstant()) {
6386 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6387 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006388 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006389 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006390 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006391 } else {
6392 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006393 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006394 }
6395 } else {
6396 Address address(CpuRegister(RSP), second.GetStackIndex());
6397 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006398 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006399 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006400 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006401 } else {
6402 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006403 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006404 }
6405 }
6406 } else {
6407 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006408 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6409 bool second_is_constant = false;
6410 int64_t value = 0;
6411 if (second.IsConstant()) {
6412 second_is_constant = true;
6413 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006414 }
Mark Mendell40741f32015-04-20 22:10:34 -04006415 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006416
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006417 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006418 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006419 if (is_int32_value) {
6420 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6421 } else {
6422 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6423 }
6424 } else if (second.IsDoubleStackSlot()) {
6425 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006426 } else {
6427 __ andq(first_reg, second.AsRegister<CpuRegister>());
6428 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006429 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006430 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006431 if (is_int32_value) {
6432 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6433 } else {
6434 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6435 }
6436 } else if (second.IsDoubleStackSlot()) {
6437 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006438 } else {
6439 __ orq(first_reg, second.AsRegister<CpuRegister>());
6440 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006441 } else {
6442 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006443 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006444 if (is_int32_value) {
6445 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6446 } else {
6447 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6448 }
6449 } else if (second.IsDoubleStackSlot()) {
6450 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006451 } else {
6452 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6453 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006454 }
6455 }
6456}
6457
// Loads the heap reference at `*(out + offset)` into `out` (the base register
// is overwritten by the loaded reference), emitting a read barrier when the
// compiler configuration requires one. `maybe_temp` must hold a register when
// a non-Baker read barrier is emitted, as the pre-load value of `out` has to
// be preserved for the slow-path barrier call.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                      Location out,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6487
// Loads the heap reference at `*(obj + offset)` into `out`, leaving `obj`
// intact (two distinct registers). A read barrier is emitted only when the
// caller requests it via `emit_read_barrier`.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                       Location out,
                                                                       Location obj,
                                                                       uint32_t offset,
                                                                       bool emit_read_barrier) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (emit_read_barrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6514
// Loads a GC root located at `address` into `root`, emitting a read barrier
// when `requires_read_barrier` is set. When `fixup_label` is non-null it is
// bound immediately after the load/lea instruction so that its displacement
// can be patched later (e.g. for PC-relative addressing).
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             const Address& address,
                                                             Label* fixup_label,
                                                             bool requires_read_barrier) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (requires_read_barrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The movl above is only valid if compressed references and GC
      // roots share the 32-bit representation checked here.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Branch to the mark slow path only while the GC is marking; the
      // flag is read from the current thread via the GS segment.
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6577
6578void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6579 Location ref,
6580 CpuRegister obj,
6581 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006582 bool needs_null_check) {
6583 DCHECK(kEmitCompilerReadBarrier);
6584 DCHECK(kUseBakerReadBarrier);
6585
6586 // /* HeapReference<Object> */ ref = *(obj + offset)
6587 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006588 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006589}
6590
6591void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6592 Location ref,
6593 CpuRegister obj,
6594 uint32_t data_offset,
6595 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006596 bool needs_null_check) {
6597 DCHECK(kEmitCompilerReadBarrier);
6598 DCHECK(kUseBakerReadBarrier);
6599
Roland Levillain3d312422016-06-23 13:53:42 +01006600 static_assert(
6601 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6602 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006603 // /* HeapReference<Object> */ ref =
6604 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006605 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006606 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006607}
6608
// Core Baker read-barrier sequence: loads the heap reference at `src` into
// `ref`, and branches to a marking slow path when the holder object `obj` is
// gray. When `always_update_field` is true, the slow path also writes the
// possibly-moved reference back to the field (`temp1`/`temp2` must then be
// provided). NOTE: the CPU flags set by the `testb` below must be preserved
// up to the conditional branch, so no flag-clobbering instruction may be
// inserted in between.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6692
// Emits an unconditional slow-path read barrier for the heap reference `ref`
// previously loaded from `obj` at `offset` (plus optional `index` for array
// accesses); the corrected reference is produced in `out`.
void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // Always take the slow path; it rejoins at the exit label.
  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6719
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006720void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6721 Location out,
6722 Location ref,
6723 Location obj,
6724 uint32_t offset,
6725 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006726 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006727 // Baker's read barriers shall be handled by the fast path
6728 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6729 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006730 // If heap poisoning is enabled, unpoisoning will be taken care of
6731 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006732 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006733 } else if (kPoisonHeapReferences) {
6734 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6735 }
6736}
6737
// Emits an unconditional slow-path read barrier for the GC root `root`,
// producing the corrected root reference in `out`.
void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
  AddSlowPath(slow_path);

  // Always take the slow path; it rejoins at the exit label.
  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6754
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching here means an HBoundType survived until code generation, which
  // is a pass-ordering bug.
  LOG(FATAL) << "Unreachable";
}
6759
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching here means an HBoundType survived until code generation, which
  // is a pass-ordering bug.
  LOG(FATAL) << "Unreachable";
}
6764
Mark Mendellfe57faa2015-09-18 09:26:15 -04006765// Simple implementation of packed switch - generate cascaded compare/jumps.
6766void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6767 LocationSummary* locations =
6768 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6769 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006770 locations->AddTemp(Location::RequiresRegister());
6771 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006772}
6773
// Code generation for HPackedSwitch: emits either a cascade of inline
// compare/jump pairs (small switches) or a PC-relative jump table stored in
// the constant area (large switches).
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Values below `lower_bound` go to the default block; equality
      // hits the first case.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // With a zero bias, an unsigned compare handles both the lower and
      // upper bound in one go.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each iteration dispatches two consecutive cases with a single cmpl.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table lowering from here on.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  // Unsigned compare: a negative (biased) value also falls to default.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6854
Aart Bikc5d47542016-01-27 17:00:35 -08006855void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6856 if (value == 0) {
6857 __ xorl(dest, dest);
6858 } else {
6859 __ movl(dest, Immediate(value));
6860 }
6861}
6862
Mark Mendell92e83bf2015-05-07 11:25:03 -04006863void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6864 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006865 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006866 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006867 } else if (IsUint<32>(value)) {
6868 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006869 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6870 } else {
6871 __ movq(dest, Immediate(value));
6872 }
6873}
6874
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006875void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6876 if (value == 0) {
6877 __ xorps(dest, dest);
6878 } else {
6879 __ movss(dest, LiteralInt32Address(value));
6880 }
6881}
6882
6883void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6884 if (value == 0) {
6885 __ xorpd(dest, dest);
6886 } else {
6887 __ movsd(dest, LiteralInt64Address(value));
6888 }
6889}
6890
6891void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6892 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6893}
6894
6895void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6896 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6897}
6898
Aart Bika19616e2016-02-01 18:57:58 -08006899void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6900 if (value == 0) {
6901 __ testl(dest, dest);
6902 } else {
6903 __ cmpl(dest, Immediate(value));
6904 }
6905}
6906
6907void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6908 if (IsInt<32>(value)) {
6909 if (value == 0) {
6910 __ testq(dest, dest);
6911 } else {
6912 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6913 }
6914 } else {
6915 // Value won't fit in an int.
6916 __ cmpq(dest, LiteralInt64Address(value));
6917 }
6918}
6919
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006920void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6921 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006922 GenerateIntCompare(lhs_reg, rhs);
6923}
6924
6925void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006926 if (rhs.IsConstant()) {
6927 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006928 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006929 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006930 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006931 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006932 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006933 }
6934}
6935
6936void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6937 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6938 if (rhs.IsConstant()) {
6939 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6940 Compare64BitValue(lhs_reg, value);
6941 } else if (rhs.IsDoubleStackSlot()) {
6942 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6943 } else {
6944 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6945 }
6946}
6947
6948Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6949 Location index,
6950 ScaleFactor scale,
6951 uint32_t data_offset) {
6952 return index.IsConstant() ?
6953 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6954 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6955}
6956
Mark Mendellcfa410b2015-05-25 16:02:44 -04006957void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6958 DCHECK(dest.IsDoubleStackSlot());
6959 if (IsInt<32>(value)) {
6960 // Can move directly as an int32 constant.
6961 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6962 Immediate(static_cast<int32_t>(value)));
6963 } else {
6964 Load64BitValue(CpuRegister(TMP), value);
6965 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6966 }
6967}
6968
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * The 32-bit displacement of a RIP-relative operand is not known until the
 * constant area has been placed, so the assembler records one of these
 * fixups and patches the displacement in Process().
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Allows subclasses (e.g. jump-table fixups) to set the offset once the
  // constant-area layout is known.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  // Non-owning back-pointer to the code generator (outlives the fixup).
  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
6996
6997/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
6999 * constant area.
7000 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The offset into the constant area is not known at construction time,
  // hence the -1 placeholder; it is set in CreateJumpTable().
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table for `switch_instr_` to the assembler's constant
  // area. Must be called after all blocks have been emitted (it reads bound
  // label positions) and before the constant area is added to the code.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The switch instruction this jump table belongs to (not owned).
  const HPackedSwitch* switch_instr_;
};
7033
// Finalizes code generation: emits the constant area (literals and jump
// tables) after the generated code, then delegates to the base class.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
    // This appends each table to the constant area and resolves the
    // corresponding RIP fixup offsets.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
7054
Mark Mendellf55c3e02015-03-26 21:07:46 -04007055Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7056 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7057 return Address::RIP(fixup);
7058}
7059
7060Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7061 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7062 return Address::RIP(fixup);
7063}
7064
7065Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7066 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7067 return Address::RIP(fixup);
7068}
7069
7070Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7071 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7072 return Address::RIP(fixup);
7073}
7074
Andreas Gampe85b62f22015-09-09 13:15:38 -07007075// TODO: trg as memory.
7076void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7077 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007078 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007079 return;
7080 }
7081
7082 DCHECK_NE(type, Primitive::kPrimVoid);
7083
7084 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7085 if (trg.Equals(return_loc)) {
7086 return;
7087 }
7088
7089 // Let the parallel move resolver take care of all of this.
7090 HParallelMove parallel_move(GetGraph()->GetArena());
7091 parallel_move.AddMove(return_loc, trg, type, nullptr);
7092 GetMoveResolver()->EmitNativeCode(&parallel_move);
7093}
7094
Mark Mendell9c86b482015-09-18 13:36:07 -04007095Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7096 // Create a fixup to be used to create and address the jump table.
7097 JumpTableRIPFixup* table_fixup =
7098 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7099
7100 // We have to populate the jump tables.
7101 fixups_to_jump_tables_.push_back(table_fixup);
7102 return Address::RIP(table_fixup);
7103}
7104
Mark Mendellea5af682015-10-22 17:35:49 -04007105void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7106 const Address& addr_high,
7107 int64_t v,
7108 HInstruction* instruction) {
7109 if (IsInt<32>(v)) {
7110 int32_t v_32 = v;
7111 __ movq(addr_low, Immediate(v_32));
7112 MaybeRecordImplicitNullCheck(instruction);
7113 } else {
7114 // Didn't fit in a register. Do it in pieces.
7115 int32_t low_v = Low32Bits(v);
7116 int32_t high_v = High32Bits(v);
7117 __ movl(addr_low, Immediate(low_v));
7118 MaybeRecordImplicitNullCheck(instruction);
7119 __ movl(addr_high, Immediate(high_v));
7120 }
7121}
7122
Roland Levillain4d027112015-07-01 15:41:14 +01007123#undef __
7124
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007125} // namespace x86_64
7126} // namespace art