blob: 038b14c262bc014424ab4613795d2c651e8f73c4 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Offset (in bytes) from SP at which the current ArtMethod* is spilled
// in the managed frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry, per the x86-64 managed
// calling convention.
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
// generate less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved registers per the x86-64 managed calling convention.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition-code bit of the x87 FPU status word
// (bit 10, i.e. 0x400).
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
// Slow path for integer Div/Rem when the divisor is -1. On x86-64 the
// idiv instruction raises a divide-error (#DE) for the overflowing
// kMin / -1 case, so that case is diverted here and computed without
// idiv: div result is -dividend (wrapping back to kMin), rem result is 0.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        // x / -1 == -x; the dividend is already in `cpu_reg_`.
        __ negl(cpu_reg_);
      } else {
        // x % -1 == 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xorl is intentional even for the 64-bit case: writes to
        // 32-bit registers zero-extend to 64 bits, and the encoding is
        // shorter than xorq.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  // Register holding the dividend on entry and the result on exit.
  const CpuRegister cpu_reg_;
  // kPrimInt or kPrimLong.
  const Primitive::Type type_;
  // True for division, false for remainder.
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
136
Andreas Gampe85b62f22015-09-09 13:15:38 -0700137class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000138 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100139 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000140 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000141
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000143 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000144 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100145 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000146 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100147 if (successor_ == nullptr) {
148 __ jmp(GetReturnLabel());
149 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000150 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100151 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 }
153
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 Label* GetReturnLabel() {
155 DCHECK(successor_ == nullptr);
156 return &return_label_;
157 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000158
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100159 HBasicBlock* GetSuccessor() const {
160 return successor_;
161 }
162
Alexandre Rames9931f312015-06-19 14:47:01 +0100163 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
164
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000165 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100166 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000167 Label return_label_;
168
169 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
170};
171
// Fatal slow path for HBoundsCheck: marshals (index, length) into the
// runtime calling convention and calls the ThrowArrayBounds (or
// ThrowStringBounds, for String.charAt) entrypoint; never returns.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory? (The ArrayLength may have
    // been folded into the bounds check and never materialized in a
    // register.)
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression) {
        // Strip the compression flag held in the sign bit so only the
        // plain character count is passed to the runtime.
        __ andl(length_loc.AsRegister<CpuRegister>(), Immediate(INT32_MAX));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both entrypoints share the (int32_t index, int32_t length) signature.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
230
// Slow path resolving (and optionally initializing) a class via the
// InitializeType / InitializeStaticStorage runtime entrypoints, then
// moving the result from RAX into the instruction's output location.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // Pass the type index as the single argument.
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
289
// Slow path resolving a String via the ResolveString runtime entrypoint
// and caching the result in the .bss entry for this string so later
// executions take the fast path.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    // kDummy32BitOffset is a placeholder displacement; the fixup label
    // bound right after the store lets the linker patch in the real
    // .bss entry offset.
    __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
            locations->Out().AsRegister<CpuRegister>());
    Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
327
// Slow path shared by HInstanceOf and HCheckCast. For instance-of it
// calls InstanceofNonTrivial and moves the result into the output; for
// check-cast it calls CheckInstanceOf, which throws on failure. When
// `is_fatal_` (check-cast known not to throw into a catch block) the
// path never returns, so no registers are saved/restored.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // Select the two runtime arguments; the entrypoints differ in which
    // locations hold the object and the class.
    Location arg0, arg1;
    if (instruction_->IsInstanceOf()) {
      arg0 = locations->InAt(1);
      arg1 = locations->Out();
    } else {
      arg0 = locations->InAt(0);
      arg1 = locations->InAt(1);
    }
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(arg0,
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               arg1,
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Class*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      // Throws on failure; returns normally on success.
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // Instance-of produces a value: fetch it from RAX.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
391
Andreas Gampe85b62f22015-09-09 13:15:38 -0700392class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700393 public:
Aart Bik42249c32016-01-07 15:33:50 -0800394 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000395 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700396
397 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000398 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100400 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000401 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700402 }
403
Alexandre Rames9931f312015-06-19 14:47:01 +0100404 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
405
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700406 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700407 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
408};
409
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100410class ArraySetSlowPathX86_64 : public SlowPathCode {
411 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000412 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100413
414 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
415 LocationSummary* locations = instruction_->GetLocations();
416 __ Bind(GetEntryLabel());
417 SaveLiveRegisters(codegen, locations);
418
419 InvokeRuntimeCallingConvention calling_convention;
420 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
421 parallel_move.AddMove(
422 locations->InAt(0),
423 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
424 Primitive::kPrimNot,
425 nullptr);
426 parallel_move.AddMove(
427 locations->InAt(1),
428 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
429 Primitive::kPrimInt,
430 nullptr);
431 parallel_move.AddMove(
432 locations->InAt(2),
433 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
434 Primitive::kPrimNot,
435 nullptr);
436 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
437
Roland Levillain0d5a2812015-11-13 10:07:31 +0000438 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100439 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000440 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100441 RestoreLiveRegisters(codegen, locations);
442 __ jmp(GetExitLabel());
443 }
444
445 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
446
447 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100448 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
449};
450
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Whitelist of instructions that may legitimately use this path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // One entrypoint per register; select by the register's number.
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
534
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the register holding the reference to be marked;
  // `field_addr` is the address of the field within `obj` that holds it.
  // `temp1` and `temp2` are scratch registers: `temp1` saves the old
  // (pre-marking) reference, `temp2` saves RAX across the CAS sequence.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates the field before us, but it's OK. This is
    // achieved using a strong compare-and-set (CAS) operation with
    // relaxed memory synchronization ordering, where the expected
    // value is the old reference and the desired value is the new
    // reference. This operation is implemented with a 32-bit LOCK
    // CMPXCHG instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch register used to save the old reference across the mark entrypoint call.
  const CpuRegister temp1_;
  // Scratch register used to save RAX across the CAS sequence.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
706
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` is where the (possibly updated) reference is stored on exit;
  // `ref` is the reference that was loaded; `obj` is the holder object;
  // the source field is at `obj` + `offset` (+ `index` * 4 for arrays).
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a core caller-save register different from `ref_` and
  // `obj_`, used to preserve a callee-save index register that
  // SaveLiveRegisters did not spill.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location where the read barrier result is stored.
  const Location out_;
  // The location of the reference that was loaded.
  const Location ref_;
  // The location of the object holding the reference.
  const Location obj_;
  // Static byte offset of the field within `obj_` (0 for intrinsics using `index_`).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
888
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `root` is the location of the GC root to be processed; the result
  // is moved into `out` before returning to the fast path.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC roots are only loaded by HLoadClass and HLoadString here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime-call argument register and
    // call the ReadBarrierForRootSlow entrypoint; its result comes
    // back in RAX.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location where the read barrier result is stored.
  const Location out_;
  // The location of the GC root.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
930
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100931#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100932// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
933#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100934
Roland Levillain4fa13f62015-07-06 18:11:54 +0100935inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700936 switch (cond) {
937 case kCondEQ: return kEqual;
938 case kCondNE: return kNotEqual;
939 case kCondLT: return kLess;
940 case kCondLE: return kLessEqual;
941 case kCondGT: return kGreater;
942 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700943 case kCondB: return kBelow;
944 case kCondBE: return kBelowEqual;
945 case kCondA: return kAbove;
946 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700947 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100948 LOG(FATAL) << "Unreachable";
949 UNREACHABLE();
950}
951
Aart Bike9f37602015-10-09 11:15:55 -0700952// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100953inline Condition X86_64FPCondition(IfCondition cond) {
954 switch (cond) {
955 case kCondEQ: return kEqual;
956 case kCondNE: return kNotEqual;
957 case kCondLT: return kBelow;
958 case kCondLE: return kBelowEqual;
959 case kCondGT: return kAbove;
960 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700961 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100962 };
963 LOG(FATAL) << "Unreachable";
964 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700965}
966
Vladimir Markodc151b22015-10-15 18:02:30 +0100967HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
968 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100969 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Vladimir Markodc151b22015-10-15 18:02:30 +0100970 switch (desired_dispatch_info.code_ptr_location) {
971 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
972 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
973 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
974 return HInvokeStaticOrDirect::DispatchInfo {
975 desired_dispatch_info.method_load_kind,
976 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
977 desired_dispatch_info.method_load_data,
978 0u
979 };
980 default:
981 return desired_dispatch_info;
982 }
983}
984
// Materializes the callee method for a static or direct invoke,
// according to the invoke's method load kind. Returns the location
// holding the callee ArtMethod* (which is `temp` for every kind
// except kRecursive, where the current method is reused).
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // Thread-local values are addressed through the GS segment on x86-64.
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling itself: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it as an immediate.
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      // Record a patch so the linker can fill in the real address.
      method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                   invoke->GetTargetMethod().dex_method_index);
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Load from a PC-relative dex cache slot; the displacement is a
      // dummy value patched at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      // Resolve the callee through the current method's dex cache.
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invokes do not carry the current method as an
        // input; reload it from the stack into `temp`.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
1042
// Emits a static or direct call: loads the callee method (or reuses
// the current one), then emits the call instruction according to the
// invoke's code pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: jump straight to this method's own frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      // Record a relative-call patch to be resolved by the linker.
      relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                          invoke->GetTargetMethod().dex_method_index);
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
1075
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod* from the embedded vtable at the invoke's vtable index, and
// calls its quick entrypoint. `temp_in` is clobbered.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The class load above doubles as the implicit null check on the receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
1106
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001107void CodeGeneratorX86_64::RecordSimplePatch() {
1108 if (GetCompilerOptions().GetIncludePatchInformation()) {
1109 simple_patches_.emplace_back();
1110 __ Bind(&simple_patches_.back());
1111 }
1112}
1113
Vladimir Markoaad75c62016-10-03 08:46:48 +00001114void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
1115 DCHECK(GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001116 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
1117 __ Bind(&string_patches_.back().label);
1118}
1119
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001120void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
1121 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
1122 __ Bind(&type_patches_.back().label);
1123}
1124
Vladimir Markoaad75c62016-10-03 08:46:48 +00001125Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1126 DCHECK(!GetCompilerOptions().IsBootImage());
1127 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
1128 return &string_patches_.back().label;
1129}
1130
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001131Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1132 uint32_t element_offset) {
1133 // Add a patch entry and return the label.
1134 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1135 return &pc_relative_dex_cache_patches_.back().label;
1136}
1137
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;

// Converts each recorded patch into a LinkerPatch via `Factory`,
// adjusting the bound label position (end of instruction) to the
// literal offset of the embedded 32-bit constant.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
  }
}
1152
// Collects all linker patches recorded during code generation (method, relative call,
// dex cache array, simple position, string and type patches) into `linker_patches`.
// Must be called on an empty output vector; reserves the exact total up front.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // Method patches: literal offset points at the embedded constant, 4 bytes
  // before the label (which marks the end of the instruction).
  for (const PatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
  }
  // Relative call patches use the same literal-offset adjustment.
  for (const PatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                              linker_patches);
  // Simple patches only record a code position, with the same 4-byte adjustment.
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  // String patches become .bss entries outside the boot image, and direct
  // PC-relative references inside it.
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  } else {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  }
  // These are always PC-relative, see GetSupportedLoadClassKind().
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
}
1187
// Prints the name of core register `reg` (e.g. for debugging/disassembly output).
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}
1191
// Prints the name of floating-point register `reg` (XMM register).
void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}
1195
// Spills core register `reg_id` to the stack slot at `stack_index`.
// Returns the number of bytes used in the slot (one 64-bit word).
size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}
1200
// Reloads core register `reg_id` from the stack slot at `stack_index`.
// Returns the number of bytes consumed from the slot (one 64-bit word).
size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}
1205
// Spills the low 64 bits of XMM register `reg_id` to the stack slot at `stack_index`
// (movsd saves only the double-precision lane). Returns the slot size used.
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}
1210
// Reloads the low 64 bits of XMM register `reg_id` from the stack slot at `stack_index`.
// Returns the slot size consumed.
size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}
1215
// Emits a call to the quick runtime entrypoint `entrypoint` on behalf of
// `instruction` at `dex_pc`, recording a stack map afterwards if the
// entrypoint requires one. `slow_path` may be null for main-path calls.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // The entrypoint address lives at a fixed offset from the thread register (GS).
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1226
// Emits a runtime call at `entry_point_offset` (byte offset from the thread
// register) without recording a stack map; only valid for entrypoints that
// the validation helper accepts as not needing one.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1233
// Emits the actual indirect call through the GS-relative thread-local
// entrypoint table at `entry_point_offset`.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1237
// x86-64 has no register pairs (64-bit registers hold longs directly).
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
// (One past the last real CPU register; reserved via the allocated-register mask.)
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The callee-save mask passed to the base
// class includes the fake return-address register so the frame layout matches
// Quick's expectations; all patch containers are arena-allocated.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Reserve the fake return-address register so the allocator never hands it out.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001273
// Constructs the per-instruction visitor, caching the assembler and the owning
// code generator for use while emitting each HInstruction.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1279
// Marks registers the allocator must never use: the stack pointer and the
// scratch register reserved for code generation temporaries.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}
1287
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001291
// Maps an XMM register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1295
// Emits the method prologue: optional stack-overflow probe, callee-save core
// register pushes, frame allocation, XMM callee-save spills, and storing the
// current ArtMethod* at the bottom of the frame. CFI is updated in lockstep
// with every stack-pointer change.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  // Leaf methods with small frames can skip the explicit overflow probe.
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Probe below the stack pointer; faults here are turned into
    // StackOverflowError by the implicit-check machinery.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push callee-save core registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame (spill area already accounted for by pushes).
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1344
// Emits the method epilogue: restores XMM callee-saves, deallocates the frame,
// pops core callee-saves in reverse push order, and returns. CFI state is
// remembered/restored around the epilogue so fall-through code keeps the
// prologue's unwind description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    // Reload callee-save XMM registers from their frame slots.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the frame space allocated in the prologue (mirror of subq there).
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-save core registers, lowest index first (reverse of the pushes).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1375
// Binds the label of `block` to the current code position.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1379
// Moves a value between two arbitrary locations (register, XMM register,
// 32-bit stack slot, 64-bit stack slot, constant), choosing the mov variant
// that matches the destination width. Stack-to-stack moves go through the
// reserved TMP register. No-op when source and destination are equal.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // Bitwise transfer from XMM to GP register.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Load the constant's raw bits; 32 bits for float, 64 for double/long.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // 32-bit stack destination.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack via the scratch register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    // 64-bit stack destination.
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack via the scratch register.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1459
// Loads the 32-bit constant `value` (sign-extended to 64 bits) into the
// register described by `location`; only register destinations are supported.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1464
// Moves `src` to `dst`; the destination type is not needed on x86-64 because
// Move() selects instruction width from the locations themselves.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1469
1470void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1471 if (location.IsRegister()) {
1472 locations->AddTemp(location);
1473 } else {
1474 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1475 }
1476}
1477
// Emits control flow for an unconditional jump (`got`) to `successor`:
// emits a suspend check on loop back edges (which subsumes the jump), a
// suspend check after the entry block, and otherwise a jmp unless the
// successor is the next block in emission order.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // The suspend-check codegen also emits the branch to the successor.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    // Fall through when the successor is emitted immediately after this block.
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1497
// HGoto needs no operand locations.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1501
// Emits the unconditional branch for HGoto via the shared HandleGoto helper.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1505
// HTryBoundary needs no operand locations.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1509
1510void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1511 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1512 if (!successor->IsExitBlock()) {
1513 HandleGoto(try_boundary, successor);
1514 }
1515}
1516
// HExit needs no operand locations.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1520
// HExit generates no code; the exit block is only a graph marker.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1523
// Emits the conditional jumps for a floating-point comparison whose flags
// were set by ucomiss/ucomisd. The unordered (NaN) case must be dispatched
// first because the FP condition code itself does not distinguish it.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1535
// Emits the compare instruction that sets the condition codes for `condition`,
// dispatching on the operand type: int-width compare for integral/reference
// types, 64-bit compare for longs, and ucomiss/ucomisd (register, constant
// pool or stack operand) for float/double.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against a float literal placed in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against a double literal placed in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1588
// Emits a compare followed by the conditional branch(es) for long and
// floating-point conditions (the cases too complex for a single jcc after a
// materialized flag). Null targets mean fall-through and are replaced by a
// local label bound at the end.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case Primitive::kPrimFloat: {
      // FP conditions need NaN-aware branching; see GenerateFPJumps.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case Primitive::kPrimDouble: {
      // Same NaN-aware branching as the float case.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1629
David Brazdil0debae72015-11-12 18:37:00 +00001630static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1631 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1632 // are set only strictly before `branch`. We can't use the eflags on long
1633 // conditions if they are materialized due to the complex branching.
1634 return cond->IsCondition() &&
1635 cond->GetNext() == branch &&
1636 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1637}
1638
// Emits the branching code for `instruction` (HIf/HDeoptimize-style) whose
// condition is input `condition_input_index`. Either target may be null,
// meaning control falls through to it. Handles constant conditions,
// flag-reuse from an adjacent condition, materialized booleans compared
// against zero, and non-materialized long/FP conditions via
// GenerateCompareTestAndBranch.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // Flags are still live from the condition; branch on them directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1722
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001723void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001724 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1725 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001726 locations->SetInAt(0, Location::Any());
1727 }
1728}
1729
1730void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001731 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1732 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1733 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1734 nullptr : codegen_->GetLabelOf(true_successor);
1735 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1736 nullptr : codegen_->GetLabelOf(false_successor);
1737 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001738}
1739
1740void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1741 LocationSummary* locations = new (GetGraph()->GetArena())
1742 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001743 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001744 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001745 locations->SetInAt(0, Location::Any());
1746 }
1747}
1748
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Jump to the deoptimization slow path when the condition (input 0) holds;
  // a null false_target lets the false case fall through to the regular code.
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1756
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001757static bool SelectCanUseCMOV(HSelect* select) {
1758 // There are no conditional move instructions for XMMs.
1759 if (Primitive::IsFloatingPointType(select->GetType())) {
1760 return false;
1761 }
1762
1763 // A FP condition doesn't generate the single CC that we need.
1764 HInstruction* condition = select->GetCondition();
1765 if (condition->IsCondition() &&
1766 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1767 return false;
1768 }
1769
1770 // We can generate a CMOV for this Select.
1771 return true;
1772}
1773
David Brazdil74eb1b22015-12-14 11:44:01 +00001774void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1775 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1776 if (Primitive::IsFloatingPointType(select->GetType())) {
1777 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001778 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001779 } else {
1780 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001781 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001782 if (select->InputAt(1)->IsConstant()) {
1783 locations->SetInAt(1, Location::RequiresRegister());
1784 } else {
1785 locations->SetInAt(1, Location::Any());
1786 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001787 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001788 locations->SetInAt(1, Location::Any());
1789 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001790 }
1791 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1792 locations->SetInAt(2, Location::RequiresRegister());
1793 }
1794 locations->SetOut(Location::SameAsFirstInput());
1795}
1796
// Lowers HSelect either to a CMOV (integer value and integer/boolean
// condition) or to a test-and-branch around a move. The output starts out
// holding the false value (input 0, same as out) and is conditionally
// overwritten with the true value (input 1).
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition not materialized: emit the compare here and use its CC.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // True value lives on the stack; CMOV can take a memory source operand.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the true value when the condition is
    // false (out already holds the false value).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1853
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs or outputs; only an (empty) summary is recorded for the node.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
1857
// Intentionally emits no code for HNativeDebugInfo.
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1861
// Emits a single NOP instruction.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1865
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001866void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001867 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001868 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001869 // Handle the long/FP comparisons made in instruction simplification.
1870 switch (cond->InputAt(0)->GetType()) {
1871 case Primitive::kPrimLong:
1872 locations->SetInAt(0, Location::RequiresRegister());
1873 locations->SetInAt(1, Location::Any());
1874 break;
1875 case Primitive::kPrimFloat:
1876 case Primitive::kPrimDouble:
1877 locations->SetInAt(0, Location::RequiresFpuRegister());
1878 locations->SetInAt(1, Location::Any());
1879 break;
1880 default:
1881 locations->SetInAt(0, Location::RequiresRegister());
1882 locations->SetInAt(1, Location::Any());
1883 break;
1884 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001885 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001886 locations->SetOut(Location::RequiresRegister());
1887 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001888}
1889
// Materializes a condition into its output register (0 or 1). Conditions
// emitted at their use site produce no code here.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      // FP compare: ucomiss against a register, stack slot or literal.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      // FP conditions need multiple jumps (NaN handling), not a single setcc.
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1959
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
1963
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
1967
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
1971
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
1975
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
1979
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
1983
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
1987
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
1991
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
1995
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
1999
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
2003
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
2007
// Unsigned comparisons also share HandleCondition.
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
2011
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
2015
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
2019
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
2023
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
2027
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
2031
// Delegates to the shared HandleCondition locations routine.
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2035
// Delegates to the shared HandleCondition codegen.
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2039
// HCompare produces a register result (-1/0/1, see the codegen visitor).
// Integral inputs use a core register, FP inputs an XMM register; the
// right-hand side may be anywhere.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // Output can share a register with the inputs.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2066
// Materializes HCompare as -1 (less), 0 (equal) or 1 (greater). For FP
// inputs, an unordered result (NaN) goes to `greater` or `less` depending on
// the compare's gt-bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used below to detect the "less" outcome; FP compares use kBelow
  // because ucomis{s,d} reports "less" through CF rather than SF/OF.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      // Compare against a register, stack slot or literal.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Equal: out stays 0 and we jump straight to done.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2135
// Constants get a constant location; they are materialized at their use sites.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2141
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2145
// Null constants are handled like other constants: constant location, no code.
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2151
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2155
// Long constants get a constant location; materialized at their use sites.
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2161
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2165
// Float constants get a constant location; materialized at their use sites.
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2171
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2175
// Double constants get a constant location; materialized at their use sites.
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2181
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2186
// A memory barrier needs no operand locations.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
2190
// Emits the barrier of the requested kind via the shared codegen helper.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2194
// A void return has no operands.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
2198
// Just tears down the frame and returns.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2202
// Pins the returned value to the x86-64 return register: RAX for integral and
// reference values, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2226
// The value already sits in the return register (enforced by the locations
// builder); in debug builds this is re-checked, then the frame is torn down.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2252
// Returns where a callee's result of the given type is found: RAX for
// integral/reference values, XMM0 for floating point, nothing for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}
2274
// The callee method is always passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2278
// Assigns the location of the next argument of the given type, advancing the
// visitor's register (gp_index_/float_index_) and stack (stack_index_)
// cursors. Stack slots are reserved for every argument (one slot for 32-bit
// values and references, two for 64-bit values) even when it is passed in a
// register.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // Register-passed long consumes a single GPR.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // NOTE(review): stack-passed long bumps gp_index_ by 2 — presumably to
        // keep the cursor aligned with the convention's slot accounting;
        // confirm against the other backends.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2334
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2341
// Resolution happens at runtime through the shared trampoline call.
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2345
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // If the call was recognized as an intrinsic, let the intrinsic builder set
  // up its (possibly specialized) locations instead of the generic ones.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2358
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002359static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2360 if (invoke->GetLocations()->Intrinsified()) {
2361 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2362 intrinsic.Dispatch(invoke);
2363 return true;
2364 }
2365 return false;
2366}
2367
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002368void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002369 // Explicit clinit checks triggered by static invokes must have been pruned by
2370 // art::PrepareForRegisterAllocation.
2371 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002372
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002373 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2374 return;
2375 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002376
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002377 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002378 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002379 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002380 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002381}
2382
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002383void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002384 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002385 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002386}
2387
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002388void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002389 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002390 if (intrinsic.TryDispatch(invoke)) {
2391 return;
2392 }
2393
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002394 HandleInvoke(invoke);
2395}
2396
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002397void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002398 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2399 return;
2400 }
2401
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002402 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002403 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002404 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002405}
2406
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002407void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2408 HandleInvoke(invoke);
2409 // Add the hidden argument.
2410 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2411}
2412
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Emits an interface call: load the receiver's class, fetch the IMT entry
  // for this interface method, and call its quick entry point, with the
  // interface method's dex index passed as a hidden argument in RAX.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. It is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Byte offset of this interface method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2458
Roland Levillain88cb1752014-10-20 16:36:47 +01002459void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2460 LocationSummary* locations =
2461 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2462 switch (neg->GetResultType()) {
2463 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002464 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002465 locations->SetInAt(0, Location::RequiresRegister());
2466 locations->SetOut(Location::SameAsFirstInput());
2467 break;
2468
Roland Levillain88cb1752014-10-20 16:36:47 +01002469 case Primitive::kPrimFloat:
2470 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002471 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002472 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002473 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002474 break;
2475
2476 default:
2477 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2478 }
2479}
2480
2481void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2482 LocationSummary* locations = neg->GetLocations();
2483 Location out = locations->Out();
2484 Location in = locations->InAt(0);
2485 switch (neg->GetResultType()) {
2486 case Primitive::kPrimInt:
2487 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002488 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002489 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002490 break;
2491
2492 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002493 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002494 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002495 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002496 break;
2497
Roland Levillain5368c212014-11-27 15:03:41 +00002498 case Primitive::kPrimFloat: {
2499 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002500 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002501 // Implement float negation with an exclusive or with value
2502 // 0x80000000 (mask for bit 31, representing the sign of a
2503 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002504 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002505 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002506 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002507 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002508
Roland Levillain5368c212014-11-27 15:03:41 +00002509 case Primitive::kPrimDouble: {
2510 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002511 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002512 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002513 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002514 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002515 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002516 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002517 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002518 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002519
2520 default:
2521 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2522 }
2523}
2524
Roland Levillaindff1f282014-11-05 14:15:05 +00002525void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2526 LocationSummary* locations =
2527 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2528 Primitive::Type result_type = conversion->GetResultType();
2529 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002530 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002531
David Brazdilb2bd1c52015-03-25 11:17:37 +00002532 // The Java language does not allow treating boolean as an integral type but
2533 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002534
Roland Levillaindff1f282014-11-05 14:15:05 +00002535 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002536 case Primitive::kPrimByte:
2537 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002538 case Primitive::kPrimLong:
2539 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002540 case Primitive::kPrimBoolean:
2541 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002542 case Primitive::kPrimShort:
2543 case Primitive::kPrimInt:
2544 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002545 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002546 locations->SetInAt(0, Location::Any());
2547 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2548 break;
2549
2550 default:
2551 LOG(FATAL) << "Unexpected type conversion from " << input_type
2552 << " to " << result_type;
2553 }
2554 break;
2555
Roland Levillain01a8d712014-11-14 16:27:39 +00002556 case Primitive::kPrimShort:
2557 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002558 case Primitive::kPrimLong:
2559 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002560 case Primitive::kPrimBoolean:
2561 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002562 case Primitive::kPrimByte:
2563 case Primitive::kPrimInt:
2564 case Primitive::kPrimChar:
2565 // Processing a Dex `int-to-short' instruction.
2566 locations->SetInAt(0, Location::Any());
2567 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2568 break;
2569
2570 default:
2571 LOG(FATAL) << "Unexpected type conversion from " << input_type
2572 << " to " << result_type;
2573 }
2574 break;
2575
Roland Levillain946e1432014-11-11 17:35:19 +00002576 case Primitive::kPrimInt:
2577 switch (input_type) {
2578 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002579 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002580 locations->SetInAt(0, Location::Any());
2581 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2582 break;
2583
2584 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002585 // Processing a Dex `float-to-int' instruction.
2586 locations->SetInAt(0, Location::RequiresFpuRegister());
2587 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002588 break;
2589
Roland Levillain946e1432014-11-11 17:35:19 +00002590 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002591 // Processing a Dex `double-to-int' instruction.
2592 locations->SetInAt(0, Location::RequiresFpuRegister());
2593 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002594 break;
2595
2596 default:
2597 LOG(FATAL) << "Unexpected type conversion from " << input_type
2598 << " to " << result_type;
2599 }
2600 break;
2601
Roland Levillaindff1f282014-11-05 14:15:05 +00002602 case Primitive::kPrimLong:
2603 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002604 case Primitive::kPrimBoolean:
2605 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002606 case Primitive::kPrimByte:
2607 case Primitive::kPrimShort:
2608 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002609 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002610 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002611 // TODO: We would benefit from a (to-be-implemented)
2612 // Location::RegisterOrStackSlot requirement for this input.
2613 locations->SetInAt(0, Location::RequiresRegister());
2614 locations->SetOut(Location::RequiresRegister());
2615 break;
2616
2617 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002618 // Processing a Dex `float-to-long' instruction.
2619 locations->SetInAt(0, Location::RequiresFpuRegister());
2620 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002621 break;
2622
Roland Levillaindff1f282014-11-05 14:15:05 +00002623 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002624 // Processing a Dex `double-to-long' instruction.
2625 locations->SetInAt(0, Location::RequiresFpuRegister());
2626 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002627 break;
2628
2629 default:
2630 LOG(FATAL) << "Unexpected type conversion from " << input_type
2631 << " to " << result_type;
2632 }
2633 break;
2634
Roland Levillain981e4542014-11-14 11:47:14 +00002635 case Primitive::kPrimChar:
2636 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002637 case Primitive::kPrimLong:
2638 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002639 case Primitive::kPrimBoolean:
2640 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002641 case Primitive::kPrimByte:
2642 case Primitive::kPrimShort:
2643 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002644 // Processing a Dex `int-to-char' instruction.
2645 locations->SetInAt(0, Location::Any());
2646 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2647 break;
2648
2649 default:
2650 LOG(FATAL) << "Unexpected type conversion from " << input_type
2651 << " to " << result_type;
2652 }
2653 break;
2654
Roland Levillaindff1f282014-11-05 14:15:05 +00002655 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002656 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002657 case Primitive::kPrimBoolean:
2658 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002659 case Primitive::kPrimByte:
2660 case Primitive::kPrimShort:
2661 case Primitive::kPrimInt:
2662 case Primitive::kPrimChar:
2663 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002664 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002665 locations->SetOut(Location::RequiresFpuRegister());
2666 break;
2667
2668 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002669 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002670 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002671 locations->SetOut(Location::RequiresFpuRegister());
2672 break;
2673
Roland Levillaincff13742014-11-17 14:32:17 +00002674 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002675 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002676 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002677 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002678 break;
2679
2680 default:
2681 LOG(FATAL) << "Unexpected type conversion from " << input_type
2682 << " to " << result_type;
2683 };
2684 break;
2685
Roland Levillaindff1f282014-11-05 14:15:05 +00002686 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002687 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002688 case Primitive::kPrimBoolean:
2689 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002690 case Primitive::kPrimByte:
2691 case Primitive::kPrimShort:
2692 case Primitive::kPrimInt:
2693 case Primitive::kPrimChar:
2694 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002695 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002696 locations->SetOut(Location::RequiresFpuRegister());
2697 break;
2698
2699 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002700 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002701 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002702 locations->SetOut(Location::RequiresFpuRegister());
2703 break;
2704
Roland Levillaincff13742014-11-17 14:32:17 +00002705 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002706 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002707 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002708 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002709 break;
2710
2711 default:
2712 LOG(FATAL) << "Unexpected type conversion from " << input_type
2713 << " to " << result_type;
2714 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002715 break;
2716
2717 default:
2718 LOG(FATAL) << "Unexpected type conversion from " << input_type
2719 << " to " << result_type;
2720 }
2721}
2722
2723void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2724 LocationSummary* locations = conversion->GetLocations();
2725 Location out = locations->Out();
2726 Location in = locations->InAt(0);
2727 Primitive::Type result_type = conversion->GetResultType();
2728 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002729 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002730 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002731 case Primitive::kPrimByte:
2732 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002733 case Primitive::kPrimLong:
2734 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002735 case Primitive::kPrimBoolean:
2736 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002737 case Primitive::kPrimShort:
2738 case Primitive::kPrimInt:
2739 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002740 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002741 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002742 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002743 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002744 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002745 Address(CpuRegister(RSP), in.GetStackIndex()));
2746 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002747 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002748 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002749 }
2750 break;
2751
2752 default:
2753 LOG(FATAL) << "Unexpected type conversion from " << input_type
2754 << " to " << result_type;
2755 }
2756 break;
2757
Roland Levillain01a8d712014-11-14 16:27:39 +00002758 case Primitive::kPrimShort:
2759 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002760 case Primitive::kPrimLong:
2761 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002762 case Primitive::kPrimBoolean:
2763 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002764 case Primitive::kPrimByte:
2765 case Primitive::kPrimInt:
2766 case Primitive::kPrimChar:
2767 // Processing a Dex `int-to-short' instruction.
2768 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002769 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002770 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002771 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002772 Address(CpuRegister(RSP), in.GetStackIndex()));
2773 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002774 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002775 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002776 }
2777 break;
2778
2779 default:
2780 LOG(FATAL) << "Unexpected type conversion from " << input_type
2781 << " to " << result_type;
2782 }
2783 break;
2784
Roland Levillain946e1432014-11-11 17:35:19 +00002785 case Primitive::kPrimInt:
2786 switch (input_type) {
2787 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002788 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002789 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002790 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002791 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002792 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002793 Address(CpuRegister(RSP), in.GetStackIndex()));
2794 } else {
2795 DCHECK(in.IsConstant());
2796 DCHECK(in.GetConstant()->IsLongConstant());
2797 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002798 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002799 }
2800 break;
2801
Roland Levillain3f8f9362014-12-02 17:45:01 +00002802 case Primitive::kPrimFloat: {
2803 // Processing a Dex `float-to-int' instruction.
2804 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2805 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002806 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002807
2808 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002809 // if input >= (float)INT_MAX goto done
2810 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002811 __ j(kAboveEqual, &done);
2812 // if input == NaN goto nan
2813 __ j(kUnordered, &nan);
2814 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002815 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002816 __ jmp(&done);
2817 __ Bind(&nan);
2818 // output = 0
2819 __ xorl(output, output);
2820 __ Bind(&done);
2821 break;
2822 }
2823
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002824 case Primitive::kPrimDouble: {
2825 // Processing a Dex `double-to-int' instruction.
2826 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2827 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002828 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002829
2830 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002831 // if input >= (double)INT_MAX goto done
2832 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002833 __ j(kAboveEqual, &done);
2834 // if input == NaN goto nan
2835 __ j(kUnordered, &nan);
2836 // output = double-to-int-truncate(input)
2837 __ cvttsd2si(output, input);
2838 __ jmp(&done);
2839 __ Bind(&nan);
2840 // output = 0
2841 __ xorl(output, output);
2842 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002843 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002844 }
Roland Levillain946e1432014-11-11 17:35:19 +00002845
2846 default:
2847 LOG(FATAL) << "Unexpected type conversion from " << input_type
2848 << " to " << result_type;
2849 }
2850 break;
2851
Roland Levillaindff1f282014-11-05 14:15:05 +00002852 case Primitive::kPrimLong:
2853 switch (input_type) {
2854 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002855 case Primitive::kPrimBoolean:
2856 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002857 case Primitive::kPrimByte:
2858 case Primitive::kPrimShort:
2859 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002860 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002861 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002862 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002863 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002864 break;
2865
Roland Levillain624279f2014-12-04 11:54:28 +00002866 case Primitive::kPrimFloat: {
2867 // Processing a Dex `float-to-long' instruction.
2868 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2869 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002870 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002871
Mark Mendell92e83bf2015-05-07 11:25:03 -04002872 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002873 // if input >= (float)LONG_MAX goto done
2874 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002875 __ j(kAboveEqual, &done);
2876 // if input == NaN goto nan
2877 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002878 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002879 __ cvttss2si(output, input, true);
2880 __ jmp(&done);
2881 __ Bind(&nan);
2882 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002883 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002884 __ Bind(&done);
2885 break;
2886 }
2887
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002888 case Primitive::kPrimDouble: {
2889 // Processing a Dex `double-to-long' instruction.
2890 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2891 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002892 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002893
Mark Mendell92e83bf2015-05-07 11:25:03 -04002894 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002895 // if input >= (double)LONG_MAX goto done
2896 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002897 __ j(kAboveEqual, &done);
2898 // if input == NaN goto nan
2899 __ j(kUnordered, &nan);
2900 // output = double-to-long-truncate(input)
2901 __ cvttsd2si(output, input, true);
2902 __ jmp(&done);
2903 __ Bind(&nan);
2904 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002905 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002906 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002907 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002908 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002909
2910 default:
2911 LOG(FATAL) << "Unexpected type conversion from " << input_type
2912 << " to " << result_type;
2913 }
2914 break;
2915
Roland Levillain981e4542014-11-14 11:47:14 +00002916 case Primitive::kPrimChar:
2917 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002918 case Primitive::kPrimLong:
2919 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002920 case Primitive::kPrimBoolean:
2921 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002922 case Primitive::kPrimByte:
2923 case Primitive::kPrimShort:
2924 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002925 // Processing a Dex `int-to-char' instruction.
2926 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002927 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002928 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002929 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002930 Address(CpuRegister(RSP), in.GetStackIndex()));
2931 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002932 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002933 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002934 }
2935 break;
2936
2937 default:
2938 LOG(FATAL) << "Unexpected type conversion from " << input_type
2939 << " to " << result_type;
2940 }
2941 break;
2942
Roland Levillaindff1f282014-11-05 14:15:05 +00002943 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002944 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002945 case Primitive::kPrimBoolean:
2946 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002947 case Primitive::kPrimByte:
2948 case Primitive::kPrimShort:
2949 case Primitive::kPrimInt:
2950 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002951 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002952 if (in.IsRegister()) {
2953 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2954 } else if (in.IsConstant()) {
2955 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2956 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002957 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002958 } else {
2959 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2960 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2961 }
Roland Levillaincff13742014-11-17 14:32:17 +00002962 break;
2963
2964 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002965 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002966 if (in.IsRegister()) {
2967 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2968 } else if (in.IsConstant()) {
2969 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2970 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002971 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002972 } else {
2973 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2974 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2975 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002976 break;
2977
Roland Levillaincff13742014-11-17 14:32:17 +00002978 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002979 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002980 if (in.IsFpuRegister()) {
2981 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2982 } else if (in.IsConstant()) {
2983 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2984 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002985 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002986 } else {
2987 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2988 Address(CpuRegister(RSP), in.GetStackIndex()));
2989 }
Roland Levillaincff13742014-11-17 14:32:17 +00002990 break;
2991
2992 default:
2993 LOG(FATAL) << "Unexpected type conversion from " << input_type
2994 << " to " << result_type;
2995 };
2996 break;
2997
Roland Levillaindff1f282014-11-05 14:15:05 +00002998 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002999 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00003000 case Primitive::kPrimBoolean:
3001 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00003002 case Primitive::kPrimByte:
3003 case Primitive::kPrimShort:
3004 case Primitive::kPrimInt:
3005 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00003006 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003007 if (in.IsRegister()) {
3008 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3009 } else if (in.IsConstant()) {
3010 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3011 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003012 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003013 } else {
3014 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3015 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3016 }
Roland Levillaincff13742014-11-17 14:32:17 +00003017 break;
3018
3019 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003020 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003021 if (in.IsRegister()) {
3022 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3023 } else if (in.IsConstant()) {
3024 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3025 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003026 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003027 } else {
3028 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3029 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3030 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003031 break;
3032
Roland Levillaincff13742014-11-17 14:32:17 +00003033 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003034 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003035 if (in.IsFpuRegister()) {
3036 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3037 } else if (in.IsConstant()) {
3038 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3039 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003040 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003041 } else {
3042 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3043 Address(CpuRegister(RSP), in.GetStackIndex()));
3044 }
Roland Levillaincff13742014-11-17 14:32:17 +00003045 break;
3046
3047 default:
3048 LOG(FATAL) << "Unexpected type conversion from " << input_type
3049 << " to " << result_type;
3050 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003051 break;
3052
3053 default:
3054 LOG(FATAL) << "Unexpected type conversion from " << input_type
3055 << " to " << result_type;
3056 }
3057}
3058
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003059void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003060 LocationSummary* locations =
3061 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003062 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003063 case Primitive::kPrimInt: {
3064 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003065 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3066 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003067 break;
3068 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003069
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003070 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003071 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003072 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003073 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003074 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003075 break;
3076 }
3077
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003078 case Primitive::kPrimDouble:
3079 case Primitive::kPrimFloat: {
3080 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003081 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003082 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003083 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003084 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003085
3086 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003087 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003088 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003089}
3090
// Emits the add itself. For integer adds, when the output register aliases
// one of the inputs a plain two-address addl/addq is used; when it aliases
// neither, the sum is materialized with leal/leaq (three-operand form via an
// address computation). FP adds accumulate into the first input, which the
// locations builder pinned as the output.
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out aliases first: two-address add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out aliases second: addition commutes, so add first into it.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out is a third register: lea computes first + second non-destructively.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // lea with a displacement: out = first + constant, first untouched.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Stack operand: only the two-address form exists, so out must be first.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The locations builder only allows int32-range long constants here
        // (RegisterOrInt32Constant), so the value must fit in a 32-bit immediate.
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Two-address SSE add: result accumulates into `first` (== out).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is loaded from the method's literal area.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same structure as the float case, with 64-bit forms.
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3182
3183void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003184 LocationSummary* locations =
3185 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003186 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003187 case Primitive::kPrimInt: {
3188 locations->SetInAt(0, Location::RequiresRegister());
3189 locations->SetInAt(1, Location::Any());
3190 locations->SetOut(Location::SameAsFirstInput());
3191 break;
3192 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003193 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003194 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003195 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003196 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003197 break;
3198 }
Calin Juravle11351682014-10-23 15:38:15 +01003199 case Primitive::kPrimFloat:
3200 case Primitive::kPrimDouble: {
3201 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003202 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003203 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003204 break;
Calin Juravle11351682014-10-23 15:38:15 +01003205 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003206 default:
Calin Juravle11351682014-10-23 15:38:15 +01003207 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003208 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003209}
3210
// Emits the subtraction. All forms are two-address: the first input is also
// the output (asserted below), and the second operand may be a register, an
// immediate/literal constant, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        // Memory operand form: subtract directly from the stack slot.
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        // The locations builder guarantees the constant fits a 32-bit immediate.
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is loaded from the method's literal area.
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same structure as the float case, with 64-bit forms.
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3273
Calin Juravle34bacdf2014-10-07 20:23:36 +01003274void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3275 LocationSummary* locations =
3276 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3277 switch (mul->GetResultType()) {
3278 case Primitive::kPrimInt: {
3279 locations->SetInAt(0, Location::RequiresRegister());
3280 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003281 if (mul->InputAt(1)->IsIntConstant()) {
3282 // Can use 3 operand multiply.
3283 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3284 } else {
3285 locations->SetOut(Location::SameAsFirstInput());
3286 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003287 break;
3288 }
3289 case Primitive::kPrimLong: {
3290 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003291 locations->SetInAt(1, Location::Any());
3292 if (mul->InputAt(1)->IsLongConstant() &&
3293 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003294 // Can use 3 operand multiply.
3295 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3296 } else {
3297 locations->SetOut(Location::SameAsFirstInput());
3298 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003299 break;
3300 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003301 case Primitive::kPrimFloat:
3302 case Primitive::kPrimDouble: {
3303 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003304 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003305 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003306 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003307 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003308
3309 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003310 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003311 }
3312}
3313
// Emits the multiplication. Constant right-hand sides are detected from the
// HIR (not from `second`'s location) so the three-operand imul can be used
// even if the register allocator placed the constant in a register; all
// other forms are two-address with out == first.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand form: out = first * imm; first is left untouched.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        // Memory operand form: multiply by the stack slot in place.
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Fits a 32-bit immediate: three-operand imulq.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Two-address SSE multiply: result accumulates into `first` (== out).
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is loaded from the method's literal area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same structure as the float case, with 64-bit forms.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3397
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003398void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3399 uint32_t stack_adjustment, bool is_float) {
3400 if (source.IsStackSlot()) {
3401 DCHECK(is_float);
3402 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3403 } else if (source.IsDoubleStackSlot()) {
3404 DCHECK(!is_float);
3405 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3406 } else {
3407 // Write the value to the temporary location on the stack and load to FP stack.
3408 if (is_float) {
3409 Location stack_temp = Location::StackSlot(temp_offset);
3410 codegen_->Move(stack_temp, source);
3411 __ flds(Address(CpuRegister(RSP), temp_offset));
3412 } else {
3413 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3414 codegen_->Move(stack_temp, source);
3415 __ fldl(Address(CpuRegister(RSP), temp_offset));
3416 }
3417 }
3418}
3419
// Computes a floating-point remainder using the x87 fprem instruction: both
// operands are staged through a temporary stack area onto the x87 stack,
// fprem is iterated until the partial-remainder reduction completes (C2
// status flag clear), and the result is written back through the stack into
// the XMM output register.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  // (fprem may only perform a partial reduction per invocation.)
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3472
// Emits the special case of integer div/rem by the constants 1 or -1:
// the remainder is always 0, and the quotient is the numerator itself
// (negated when the divisor is -1). Avoids the general idiv sequence.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt: {
      if (instruction->IsRem()) {
        // x % (+/-)1 == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          // x / -1 == -x.
          __ negl(output_register);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (instruction->IsRem()) {
        // xorl also clears the upper 32 bits, zeroing the full 64-bit register.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
3515
// Emits signed division by a power-of-two constant (positive or negative)
// without idiv, using the standard bias-and-shift sequence: negative
// numerators are biased by |imm| - 1 (selected via cmov) before the
// arithmetic right shift, so the division rounds toward zero as Java
// requires; the result is negated when the divisor is negative.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  // AbsOrMin avoids undefined behavior when imm is the minimum value.
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (|imm| - 1), the biased value for negative numerators.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Keep the unbiased numerator when it is non-negative.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // 64-bit bias may not fit a lea displacement; materialize it and add.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3559
// Generates code for an integral HDiv/HRem whose divisor is a constant other
// than 0 or +/-1 (and, for div, not a power of two). Uses the magic-number
// multiplication technique: one-operand imul leaves the high half of
// numerator * magic in RDX, which after a correction add/sub, an arithmetic
// shift, and adding the sign bit yields the quotient. For rem, the remainder
// is then computed as numerator - quotient * imm.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // For div, GetTemp(0) is RDX so the numerator copy is GetTemp(1); for rem,
  // RDX is the output itself and the numerator copy is GetTemp(0).
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator: the one-operand imul below clobbers EAX and EDX.
    __ movl(numerator, eax);

    // EAX = magic.
    __ movl(eax, Immediate(magic));
    // EDX:EAX = magic * numerator.
    __ imull(numerator);

    if (imm > 0 && magic < 0) {
      // EDX += numerator (correction for a negative magic constant).
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      // EDX -= numerator (correction for a negative divisor).
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      if (IsInt<32>(imm)) {
        // imulq takes at most a 32-bit immediate.
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        // Wider constants are multiplied from a RIP-relative literal.
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3670
Calin Juravlebacfec32014-11-14 15:54:36 +00003671void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3672 DCHECK(instruction->IsDiv() || instruction->IsRem());
3673 Primitive::Type type = instruction->GetResultType();
3674 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3675
3676 bool is_div = instruction->IsDiv();
3677 LocationSummary* locations = instruction->GetLocations();
3678
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003679 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3680 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003681
Roland Levillain271ab9c2014-11-27 15:23:57 +00003682 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003683 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003684
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003685 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003686 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003687
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003688 if (imm == 0) {
3689 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3690 } else if (imm == 1 || imm == -1) {
3691 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003692 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003693 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003694 } else {
3695 DCHECK(imm <= -2 || imm >= 2);
3696 GenerateDivRemWithAnyConstant(instruction);
3697 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003698 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003699 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003700 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003701 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003702 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003703
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003704 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3705 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3706 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3707 // so it's safe to just use negl instead of more complex comparisons.
3708 if (type == Primitive::kPrimInt) {
3709 __ cmpl(second_reg, Immediate(-1));
3710 __ j(kEqual, slow_path->GetEntryLabel());
3711 // edx:eax <- sign-extended of eax
3712 __ cdq();
3713 // eax = quotient, edx = remainder
3714 __ idivl(second_reg);
3715 } else {
3716 __ cmpq(second_reg, Immediate(-1));
3717 __ j(kEqual, slow_path->GetEntryLabel());
3718 // rdx:rax <- sign-extended of rax
3719 __ cqo();
3720 // rax = quotient, rdx = remainder
3721 __ idivq(second_reg);
3722 }
3723 __ Bind(slow_path->GetExitLabel());
3724 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003725}
3726
// Sets up register constraints for HDiv. Integral division pins the dividend
// and quotient to RAX (with RDX as a temp, as idiv/imul require); floating
// point division reuses the first FPU input as the output.
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax as the dividend.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // Constant divisors are lowered with one-operand imul, which clobbers
      // RAX and RDX, so an extra temp is needed to keep the numerator alive
      // (see GenerateDivRemWithAnyConstant).
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      // divss/divsd accept a register, a literal-pool constant, or a stack
      // slot as the second operand.
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3759
// Emits code for HDiv. Integral types delegate to the shared div/rem
// lowering; float/double emit divss/divsd with the divisor taken from a
// register, a RIP-relative literal, or a stack slot as allocated.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));  // Output overwrites the first input.

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor: read it from the literal pool.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3808
// Sets up register constraints for HRem. Integral remainder shares the idiv
// lowering with HDiv: dividend in RAX, remainder produced in (and output
// from) RDX. Float/double remainders are handled by GenerateRemFP, which
// needs RAX as a temporary.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx.
      locations->SetOut(Location::RegisterLocation(RDX));
      // Constant divisors are lowered with one-operand imul, which clobbers
      // RAX and RDX, so an extra temp keeps the numerator alive (see
      // GenerateDivRemWithAnyConstant).
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3843
3844void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3845 Primitive::Type type = rem->GetResultType();
3846 switch (type) {
3847 case Primitive::kPrimInt:
3848 case Primitive::kPrimLong: {
3849 GenerateDivRemIntegral(rem);
3850 break;
3851 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003852 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003853 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003854 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003855 break;
3856 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003857 default:
3858 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3859 }
3860}
3861
// HDivZeroCheck may throw, so use the throwing-slow-path location summary;
// the checked value can live anywhere (register, stack, or constant).
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3866
// Emits the divisor-is-zero test for HDivZeroCheck and branches to a
// throwing slow path when the divisor is zero. Handles the value in a
// register, on the stack, or as a compile-time constant (in which case the
// jump is unconditional, or nothing is emitted for a non-zero constant).
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // All sub-word integral types are checked with the 32-bit forms.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: the check always fails.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3914
Calin Juravle9aec02f2014-11-18 23:06:35 +00003915void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3916 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3917
3918 LocationSummary* locations =
3919 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3920
3921 switch (op->GetResultType()) {
3922 case Primitive::kPrimInt:
3923 case Primitive::kPrimLong: {
3924 locations->SetInAt(0, Location::RequiresRegister());
3925 // The shift count needs to be in CL.
3926 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3927 locations->SetOut(Location::SameAsFirstInput());
3928 break;
3929 }
3930 default:
3931 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3932 }
3933}
3934
// Emits shll/sarl/shrl (or their 64-bit q-forms) for HShl/HShr/HUShr,
// shifting the first input in place. Variable counts come in CL (enforced by
// the location setup); constant counts are masked to the architectural
// maximum shift distance (31 for int, 63 for long).
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3992
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003993void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3994 LocationSummary* locations =
3995 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3996
3997 switch (ror->GetResultType()) {
3998 case Primitive::kPrimInt:
3999 case Primitive::kPrimLong: {
4000 locations->SetInAt(0, Location::RequiresRegister());
4001 // The shift count needs to be in CL (unless it is a constant).
4002 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4003 locations->SetOut(Location::SameAsFirstInput());
4004 break;
4005 }
4006 default:
4007 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4008 UNREACHABLE();
4009 }
4010}
4011
// Emits rorl/rorq for HRor, rotating the first input in place. Constant
// rotate amounts are masked to the maximum shift distance (31 or 63).
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        // Variable count: must be in CL (enforced by the location setup).
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4041
// Shl, Shr and UShr share one lowering; all six visitors forward to
// HandleShift, which dispatches on the instruction kind.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4065
// HNewInstance is lowered to a runtime call, so the arguments are pinned to
// the runtime calling-convention registers and the new object comes back in
// RAX. String allocation takes a different path (StringFactory) and only
// needs the method register as a temp.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(Location::RegisterLocation(RAX));
}
4078
// Emits the allocation call for HNewInstance. Strings go through the
// StringFactory NewEmptyString entry point, loaded thread-locally via the GS
// segment; everything else uses the instruction's runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes cares
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    // Record the PC manually since this call bypasses InvokeRuntime.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4095
// HNewArray is a runtime call: the type index goes in the first runtime
// argument register (a temp, loaded by the code generator), the length and
// method in the next two, and the array is returned in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
4105
// Emits the allocation call for HNewArray: load the type index into the
// first runtime argument register, then invoke the allocation entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes cares
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
4117
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004118void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004119 LocationSummary* locations =
4120 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004121 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4122 if (location.IsStackSlot()) {
4123 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4124 } else if (location.IsDoubleStackSlot()) {
4125 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4126 }
4127 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004128}
4129
// No code: the parameter is already materialized at the location the
// builder assigned (argument register or caller-frame stack slot).
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4134
// The current ArtMethod* is always available in the dedicated method
// register, so the output is pinned there.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4140
// No code: the method pointer already lives in the register the builder
// pinned for it.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4145
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004146void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4147 LocationSummary* locations =
4148 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4149 locations->SetInAt(0, Location::RequiresRegister());
4150 locations->SetOut(Location::RequiresRegister());
4151}
4152
// Loads an ArtMethod* from a class's dispatch tables. The vtable case is a
// single load at the embedded vtable entry's offset; the IMT case first
// loads the IMT pointer out of the class, then the entry from that table.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // First dereference the IMT pointer stored in the class...
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // ...then load the entry at the computed offset within the IMT.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4170
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004171void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004172 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004173 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004174 locations->SetInAt(0, Location::RequiresRegister());
4175 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004176}
4177
// Emits a bitwise complement (notl/notq) on the in-place register for HNot.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  // Input and output must share a register (SameAsFirstInput).
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ notl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      __ notq(out.AsRegister<CpuRegister>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
4196
David Brazdil66d126e2015-04-03 16:02:44 +01004197void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4198 LocationSummary* locations =
4199 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4200 locations->SetInAt(0, Location::RequiresRegister());
4201 locations->SetOut(Location::SameAsFirstInput());
4202}
4203
4204void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004205 LocationSummary* locations = bool_not->GetLocations();
4206 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4207 locations->Out().AsRegister<CpuRegister>().AsRegister());
4208 Location out = locations->Out();
4209 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4210}
4211
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004212void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004213 LocationSummary* locations =
4214 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004215 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004216 locations->SetInAt(i, Location::Any());
4217 }
4218 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004219}
4220
// Phis must never reach code generation: they are eliminated during
// register allocation (resolved into moves at block boundaries).
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4224
// Emits (or elides) a memory barrier of the requested kind.
void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
  /*
   * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
   * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
   * For those cases, all we need to ensure is that there is a scheduling barrier in place.
   */
  switch (kind) {
    case MemBarrierKind::kAnyAny: {
      // The only kind that requires an actual fence instruction on x86-64.
      MemoryFence();
      break;
    }
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kStoreStore: {
      // nop
      break;
    }
    case MemBarrierKind::kNTStoreStore:
      // Non-Temporal Store/Store needs an explicit fence.
      MemoryFence(/* non-temporal */ true);
      break;
  }
}
4248
4249void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4250 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4251
Roland Levillain0d5a2812015-11-13 10:07:31 +00004252 bool object_field_get_with_read_barrier =
4253 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004254 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004255 new (GetGraph()->GetArena()) LocationSummary(instruction,
4256 object_field_get_with_read_barrier ?
4257 LocationSummary::kCallOnSlowPath :
4258 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004259 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004260 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004261 }
Calin Juravle52c48962014-12-16 17:02:57 +00004262 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004263 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4264 locations->SetOut(Location::RequiresFpuRegister());
4265 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004266 // The output overlaps for an object field get when read barriers
4267 // are enabled: we do not want the move to overwrite the object's
4268 // location, as we need it to emit the read barrier.
4269 locations->SetOut(
4270 Location::RequiresRegister(),
4271 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004272 }
Calin Juravle52c48962014-12-16 17:02:57 +00004273}
4274
// Emits the load shared by instance and static field reads: a width-correct
// move from (base + offset), plus any read barrier and volatile fencing.
// Note the careful interleaving with MaybeRecordImplicitNullCheck: the PC of
// the *first* instruction touching the object must be the one recorded.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extended byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extended byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extended 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extended 16-bit load (Java char is unsigned).
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // Volatile load: order it before subsequent memory operations.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4373
4374void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4375 const FieldInfo& field_info) {
4376 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4377
4378 LocationSummary* locations =
4379 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004380 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004381 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004382 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004383 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004384
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004385 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004386 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004387 if (is_volatile) {
4388 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4389 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4390 } else {
4391 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4392 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004393 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004394 if (is_volatile) {
4395 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4396 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4397 } else {
4398 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4399 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004400 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004401 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004402 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004403 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004404 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004405 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4406 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004407 locations->AddTemp(Location::RequiresRegister());
4408 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004409}
4410
// Emits the store shared by instance and static field writes: a width-correct
// move to (base + offset), with volatile fencing around it, heap-reference
// poisoning, an implicit null check, and the GC card mark when needed.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Order prior memory operations before the volatile store.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper has already recorded the implicit null check itself
  // (MoveInt64ToAddress may emit more than one instruction).
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison a copy of the reference in a temp, then store the temp,
          // leaving the original value register intact for the card mark.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the holder's GC card so the concurrent collector rescans it.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // StoreLoad barrier after the volatile store (the only fence x86 needs).
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4533
// Instance field stores share their location logic with static field stores.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4537
// Instance field stores share their code emission with static field stores.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4541
// Instance field loads share their location logic with static field loads.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4545
// Instance field loads share their code emission with static field loads.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004549
// Static field loads share their location logic with instance field loads.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004553
// Static field loads share their code emission with instance field loads.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004557
// Static field stores share their location logic with instance field stores.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004561
// Static field stores share their code emission with instance field stores.
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4565
Calin Juravlee460d1d2015-09-29 04:52:17 +01004566void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4567 HUnresolvedInstanceFieldGet* instruction) {
4568 FieldAccessCallingConventionX86_64 calling_convention;
4569 codegen_->CreateUnresolvedFieldLocationSummary(
4570 instruction, instruction->GetFieldType(), calling_convention);
4571}
4572
4573void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4574 HUnresolvedInstanceFieldGet* instruction) {
4575 FieldAccessCallingConventionX86_64 calling_convention;
4576 codegen_->GenerateUnresolvedFieldAccess(instruction,
4577 instruction->GetFieldType(),
4578 instruction->GetFieldIndex(),
4579 instruction->GetDexPc(),
4580 calling_convention);
4581}
4582
4583void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4584 HUnresolvedInstanceFieldSet* instruction) {
4585 FieldAccessCallingConventionX86_64 calling_convention;
4586 codegen_->CreateUnresolvedFieldLocationSummary(
4587 instruction, instruction->GetFieldType(), calling_convention);
4588}
4589
4590void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4591 HUnresolvedInstanceFieldSet* instruction) {
4592 FieldAccessCallingConventionX86_64 calling_convention;
4593 codegen_->GenerateUnresolvedFieldAccess(instruction,
4594 instruction->GetFieldType(),
4595 instruction->GetFieldIndex(),
4596 instruction->GetDexPc(),
4597 calling_convention);
4598}
4599
4600void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4601 HUnresolvedStaticFieldGet* instruction) {
4602 FieldAccessCallingConventionX86_64 calling_convention;
4603 codegen_->CreateUnresolvedFieldLocationSummary(
4604 instruction, instruction->GetFieldType(), calling_convention);
4605}
4606
4607void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4608 HUnresolvedStaticFieldGet* instruction) {
4609 FieldAccessCallingConventionX86_64 calling_convention;
4610 codegen_->GenerateUnresolvedFieldAccess(instruction,
4611 instruction->GetFieldType(),
4612 instruction->GetFieldIndex(),
4613 instruction->GetDexPc(),
4614 calling_convention);
4615}
4616
4617void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4618 HUnresolvedStaticFieldSet* instruction) {
4619 FieldAccessCallingConventionX86_64 calling_convention;
4620 codegen_->CreateUnresolvedFieldLocationSummary(
4621 instruction, instruction->GetFieldType(), calling_convention);
4622}
4623
4624void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4625 HUnresolvedStaticFieldSet* instruction) {
4626 FieldAccessCallingConventionX86_64 calling_convention;
4627 codegen_->GenerateUnresolvedFieldAccess(instruction,
4628 instruction->GetFieldType(),
4629 instruction->GetFieldIndex(),
4630 instruction->GetDexPc(),
4631 calling_convention);
4632}
4633
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004634void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004635 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4636 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4637 ? Location::RequiresRegister()
4638 : Location::Any();
4639 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004640}
4641
// Emits a fault-based null check: touch the object's memory so that a null
// reference raises a hardware fault, which the runtime converts into a
// NullPointerException at the recorded PC.
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    // A following memory access on the same object will fault instead;
    // no separate probe is needed here.
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // TEST reads from [obj + 0] without clobbering any register.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  RecordPcInfo(instruction, instruction->GetDexPc());
}
4652
// Emits an explicit compare-against-null followed by a jump to a throwing
// slow path. The object may live in a register, on the stack, or be a
// constant (in which case it must be the null constant).
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF iff the reference is null.
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    // The reference is statically null: always throw.
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
4672
// Dispatches to the implicit (fault-based) or explicit null check emission.
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
4676
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004677void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004678 bool object_array_get_with_read_barrier =
4679 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004680 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004681 new (GetGraph()->GetArena()) LocationSummary(instruction,
4682 object_array_get_with_read_barrier ?
4683 LocationSummary::kCallOnSlowPath :
4684 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004685 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004686 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004687 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004688 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004689 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004690 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4691 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4692 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004693 // The output overlaps for an object array get when read barriers
4694 // are enabled: we do not want the move to overwrite the array's
4695 // location, as we need it to emit the read barrier.
4696 locations->SetOut(
4697 Location::RequiresRegister(),
4698 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004699 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004700}
4701
4702void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4703 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004704 Location obj_loc = locations->InAt(0);
4705 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004706 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004707 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004708 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004709
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004710 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004711 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004712 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004713 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004714 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004715 break;
4716 }
4717
4718 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004719 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004720 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004721 break;
4722 }
4723
4724 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004725 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004726 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004727 break;
4728 }
4729
4730 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004731 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07004732 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
4733 // Branch cases into compressed and uncompressed for each index's type.
4734 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4735 NearLabel done, not_compressed;
4736 __ cmpl(Address(obj, count_offset), Immediate(0));
4737 codegen_->MaybeRecordImplicitNullCheck(instruction);
4738 __ j(kGreaterEqual, &not_compressed);
4739 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
4740 __ jmp(&done);
4741 __ Bind(&not_compressed);
4742 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4743 __ Bind(&done);
4744 } else {
4745 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4746 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004747 break;
4748 }
4749
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004750 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004751 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004752 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004753 break;
4754 }
4755
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004756 case Primitive::kPrimNot: {
4757 static_assert(
4758 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4759 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004760 // /* HeapReference<Object> */ out =
4761 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4762 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004763 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01004764 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004765 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004766 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004767 } else {
4768 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004769 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4770 codegen_->MaybeRecordImplicitNullCheck(instruction);
4771 // If read barriers are enabled, emit read barriers other than
4772 // Baker's using a slow path (and also unpoison the loaded
4773 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004774 if (index.IsConstant()) {
4775 uint32_t offset =
4776 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004777 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4778 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004779 codegen_->MaybeGenerateReadBarrierSlow(
4780 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4781 }
4782 }
4783 break;
4784 }
4785
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004786 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004787 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004788 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004789 break;
4790 }
4791
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004792 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004793 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004794 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004795 break;
4796 }
4797
4798 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004799 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004800 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004801 break;
4802 }
4803
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004804 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004805 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004806 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004807 }
Roland Levillain4d027112015-07-01 15:41:14 +01004808
4809 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004810 // Potential implicit null checks, in the case of reference
4811 // arrays, are handled in the previous switch statement.
4812 } else {
4813 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004814 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004815}
4816
4817void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004818 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004819
4820 bool needs_write_barrier =
4821 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004822 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004823
Nicolas Geoffray39468442014-09-02 15:17:15 +01004824 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004825 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004826 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004827 LocationSummary::kCallOnSlowPath :
4828 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004829
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004830 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004831 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4832 if (Primitive::IsFloatingPointType(value_type)) {
4833 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004834 } else {
4835 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4836 }
4837
4838 if (needs_write_barrier) {
4839 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004840 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004841 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004842 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004843}
4844
4845void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4846 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004847 Location array_loc = locations->InAt(0);
4848 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004849 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004850 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004851 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004852 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004853 bool needs_write_barrier =
4854 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004855 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4856 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4857 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004858
4859 switch (value_type) {
4860 case Primitive::kPrimBoolean:
4861 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004862 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004863 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004864 if (value.IsRegister()) {
4865 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004866 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004867 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004868 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004869 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004870 break;
4871 }
4872
4873 case Primitive::kPrimShort:
4874 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004875 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004876 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004877 if (value.IsRegister()) {
4878 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004879 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004880 DCHECK(value.IsConstant()) << value;
4881 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004882 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004883 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004884 break;
4885 }
4886
4887 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004888 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004889 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004890
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004891 if (!value.IsRegister()) {
4892 // Just setting null.
4893 DCHECK(instruction->InputAt(2)->IsNullConstant());
4894 DCHECK(value.IsConstant()) << value;
4895 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004896 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004897 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004898 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004899 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004900 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004901
4902 DCHECK(needs_write_barrier);
4903 CpuRegister register_value = value.AsRegister<CpuRegister>();
Roland Levillain16d9f942016-08-25 17:27:56 +01004904 // We cannot use a NearLabel for `done`, as its range may be too
4905 // short when Baker read barriers are enabled.
4906 Label done;
4907 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004908 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01004909 Location temp_loc = locations->GetTemp(0);
4910 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004911 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004912 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4913 codegen_->AddSlowPath(slow_path);
4914 if (instruction->GetValueCanBeNull()) {
4915 __ testl(register_value, register_value);
4916 __ j(kNotEqual, &not_null);
4917 __ movl(address, Immediate(0));
4918 codegen_->MaybeRecordImplicitNullCheck(instruction);
4919 __ jmp(&done);
4920 __ Bind(&not_null);
4921 }
4922
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004923 // Note that when Baker read barriers are enabled, the type
4924 // checks are performed without read barriers. This is fine,
4925 // even in the case where a class object is in the from-space
4926 // after the flip, as a comparison involving such a type would
4927 // not produce a false positive; it may of course produce a
4928 // false negative, in which case we would take the ArraySet
4929 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01004930
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004931 // /* HeapReference<Class> */ temp = array->klass_
4932 __ movl(temp, Address(array, class_offset));
4933 codegen_->MaybeRecordImplicitNullCheck(instruction);
4934 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01004935
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004936 // /* HeapReference<Class> */ temp = temp->component_type_
4937 __ movl(temp, Address(temp, component_offset));
4938 // If heap poisoning is enabled, no need to unpoison `temp`
4939 // nor the object reference in `register_value->klass`, as
4940 // we are comparing two poisoned references.
4941 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01004942
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004943 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4944 __ j(kEqual, &do_put);
4945 // If heap poisoning is enabled, the `temp` reference has
4946 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004947 __ MaybeUnpoisonHeapReference(temp);
4948
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004949 // If heap poisoning is enabled, no need to unpoison the
4950 // heap reference loaded below, as it is only used for a
4951 // comparison with null.
4952 __ cmpl(Address(temp, super_offset), Immediate(0));
4953 __ j(kNotEqual, slow_path->GetEntryLabel());
4954 __ Bind(&do_put);
4955 } else {
4956 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004957 }
4958 }
4959
4960 if (kPoisonHeapReferences) {
4961 __ movl(temp, register_value);
4962 __ PoisonHeapReference(temp);
4963 __ movl(address, temp);
4964 } else {
4965 __ movl(address, register_value);
4966 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004967 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004968 codegen_->MaybeRecordImplicitNullCheck(instruction);
4969 }
4970
4971 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4972 codegen_->MarkGCCard(
4973 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4974 __ Bind(&done);
4975
4976 if (slow_path != nullptr) {
4977 __ Bind(slow_path->GetExitLabel());
4978 }
4979
4980 break;
4981 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004982
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004983 case Primitive::kPrimInt: {
4984 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004985 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004986 if (value.IsRegister()) {
4987 __ movl(address, value.AsRegister<CpuRegister>());
4988 } else {
4989 DCHECK(value.IsConstant()) << value;
4990 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4991 __ movl(address, Immediate(v));
4992 }
4993 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004994 break;
4995 }
4996
4997 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004998 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004999 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005000 if (value.IsRegister()) {
5001 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04005002 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005003 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005004 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005005 Address address_high =
5006 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04005007 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005008 }
5009 break;
5010 }
5011
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005012 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005013 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005014 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04005015 if (value.IsFpuRegister()) {
5016 __ movss(address, value.AsFpuRegister<XmmRegister>());
5017 } else {
5018 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005019 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
Mark Mendellea5af682015-10-22 17:35:49 -04005020 __ movl(address, Immediate(v));
5021 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005022 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005023 break;
5024 }
5025
5026 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005027 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005028 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04005029 if (value.IsFpuRegister()) {
5030 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5031 codegen_->MaybeRecordImplicitNullCheck(instruction);
5032 } else {
5033 int64_t v =
5034 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005035 Address address_high =
5036 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04005037 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
5038 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005039 break;
5040 }
5041
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005042 case Primitive::kPrimVoid:
5043 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005044 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005045 }
5046}
5047
5048void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005049 LocationSummary* locations =
5050 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005051 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005052 if (!instruction->IsEmittedAtUseSite()) {
5053 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5054 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005055}
5056
5057void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005058 if (instruction->IsEmittedAtUseSite()) {
5059 return;
5060 }
5061
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005062 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005063 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005064 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5065 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005066 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005067 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005068 // Mask out most significant bit in case the array is String's array of char.
5069 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
5070 __ andl(out, Immediate(INT32_MAX));
5071 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005072}
5073
5074void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005075 RegisterSet caller_saves = RegisterSet::Empty();
5076 InvokeRuntimeCallingConvention calling_convention;
5077 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5078 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5079 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005080 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005081 HInstruction* length = instruction->InputAt(1);
5082 if (!length->IsEmittedAtUseSite()) {
5083 locations->SetInAt(1, Location::RegisterOrConstant(length));
5084 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005085}
5086
5087void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
5088 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005089 Location index_loc = locations->InAt(0);
5090 Location length_loc = locations->InAt(1);
Mark Mendellee8d9712016-07-12 11:13:15 -04005091 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005092
Mark Mendell99dbd682015-04-22 16:18:52 -04005093 if (length_loc.IsConstant()) {
5094 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5095 if (index_loc.IsConstant()) {
5096 // BCE will remove the bounds check if we are guarenteed to pass.
5097 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5098 if (index < 0 || index >= length) {
5099 codegen_->AddSlowPath(slow_path);
5100 __ jmp(slow_path->GetEntryLabel());
5101 } else {
5102 // Some optimization after BCE may have generated this, and we should not
5103 // generate a bounds check if it is a valid range.
5104 }
5105 return;
5106 }
5107
5108 // We have to reverse the jump condition because the length is the constant.
5109 CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
5110 __ cmpl(index_reg, Immediate(length));
5111 codegen_->AddSlowPath(slow_path);
5112 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005113 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005114 HInstruction* array_length = instruction->InputAt(1);
5115 if (array_length->IsEmittedAtUseSite()) {
5116 // Address the length field in the array.
5117 DCHECK(array_length->IsArrayLength());
5118 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5119 Location array_loc = array_length->GetLocations()->InAt(0);
5120 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07005121 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5122 CpuRegister length_reg = CpuRegister(TMP);
5123 __ movl(length_reg, array_len);
5124 codegen_->MaybeRecordImplicitNullCheck(array_length);
5125 __ andl(length_reg, Immediate(INT32_MAX));
5126 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04005127 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07005128 // Checking the bound for general case:
5129 // Array of char or String's array when the compression feature off.
5130 if (index_loc.IsConstant()) {
5131 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5132 __ cmpl(array_len, Immediate(value));
5133 } else {
5134 __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
5135 }
5136 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04005137 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005138 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005139 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04005140 }
5141 codegen_->AddSlowPath(slow_path);
5142 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005143 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005144}
5145
5146void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5147 CpuRegister card,
5148 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005149 CpuRegister value,
5150 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005151 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005152 if (value_can_be_null) {
5153 __ testl(value, value);
5154 __ j(kEqual, &is_null);
5155 }
Andreas Gampe542451c2016-07-26 09:02:02 -07005156 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005157 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005158 __ movq(temp, object);
5159 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005160 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005161 if (value_can_be_null) {
5162 __ Bind(&is_null);
5163 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005164}
5165
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005166void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005167 LOG(FATAL) << "Unimplemented";
5168}
5169
5170void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005171 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5172}
5173
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005174void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005175 LocationSummary* locations =
5176 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005177 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005178}
5179
5180void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005181 HBasicBlock* block = instruction->GetBlock();
5182 if (block->GetLoopInformation() != nullptr) {
5183 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5184 // The back edge will generate the suspend check.
5185 return;
5186 }
5187 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5188 // The goto will generate the suspend check.
5189 return;
5190 }
5191 GenerateSuspendCheck(instruction, nullptr);
5192}
5193
5194void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
5195 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005196 SuspendCheckSlowPathX86_64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005197 down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
5198 if (slow_path == nullptr) {
5199 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
5200 instruction->SetSlowPath(slow_path);
5201 codegen_->AddSlowPath(slow_path);
5202 if (successor != nullptr) {
5203 DCHECK(successor->IsLoopHeader());
5204 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5205 }
5206 } else {
5207 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5208 }
5209
Andreas Gampe542451c2016-07-26 09:02:02 -07005210 __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005211 /* no_rip */ true),
5212 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005213 if (successor == nullptr) {
5214 __ j(kNotEqual, slow_path->GetEntryLabel());
5215 __ Bind(slow_path->GetReturnLabel());
5216 } else {
5217 __ j(kEqual, codegen_->GetLabelOf(successor));
5218 __ jmp(slow_path->GetEntryLabel());
5219 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005220}
5221
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005222X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5223 return codegen_->GetAssembler();
5224}
5225
5226void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005227 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005228 Location source = move->GetSource();
5229 Location destination = move->GetDestination();
5230
5231 if (source.IsRegister()) {
5232 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005233 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005234 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005235 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005236 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005237 } else {
5238 DCHECK(destination.IsDoubleStackSlot());
5239 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005240 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005241 }
5242 } else if (source.IsStackSlot()) {
5243 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005244 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005245 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005246 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005247 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005248 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005249 } else {
5250 DCHECK(destination.IsStackSlot());
5251 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5252 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5253 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005254 } else if (source.IsDoubleStackSlot()) {
5255 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005256 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005257 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005258 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005259 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5260 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005261 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005262 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005263 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5264 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5265 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005266 } else if (source.IsConstant()) {
5267 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005268 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5269 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005270 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005271 if (value == 0) {
5272 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5273 } else {
5274 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5275 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005276 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005277 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005278 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005279 }
5280 } else if (constant->IsLongConstant()) {
5281 int64_t value = constant->AsLongConstant()->GetValue();
5282 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005283 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005284 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005285 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005286 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005287 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005288 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005289 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005290 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005291 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005292 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005293 } else {
5294 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005295 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005296 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5297 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005298 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005299 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005300 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005301 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005302 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005303 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005304 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005305 } else {
5306 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005307 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005308 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005309 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005310 } else if (source.IsFpuRegister()) {
5311 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005312 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005313 } else if (destination.IsStackSlot()) {
5314 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005315 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005316 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005317 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005318 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005319 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005320 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005321 }
5322}
5323
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005324void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005325 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005326 __ movl(Address(CpuRegister(RSP), mem), reg);
5327 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005328}
5329
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005330void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005331 ScratchRegisterScope ensure_scratch(
5332 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5333
5334 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5335 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5336 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5337 Address(CpuRegister(RSP), mem2 + stack_offset));
5338 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5339 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5340 CpuRegister(ensure_scratch.GetRegister()));
5341}
5342
Mark Mendell8a1c7282015-06-29 15:41:28 -04005343void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5344 __ movq(CpuRegister(TMP), reg1);
5345 __ movq(reg1, reg2);
5346 __ movq(reg2, CpuRegister(TMP));
5347}
5348
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005349void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5350 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5351 __ movq(Address(CpuRegister(RSP), mem), reg);
5352 __ movq(reg, CpuRegister(TMP));
5353}
5354
5355void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5356 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005357 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005358
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005359 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5360 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5361 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5362 Address(CpuRegister(RSP), mem2 + stack_offset));
5363 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5364 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5365 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005366}
5367
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005368void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5369 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5370 __ movss(Address(CpuRegister(RSP), mem), reg);
5371 __ movd(reg, CpuRegister(TMP));
5372}
5373
5374void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5375 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5376 __ movsd(Address(CpuRegister(RSP), mem), reg);
5377 __ movd(reg, CpuRegister(TMP));
5378}
5379
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005380void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005381 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005382 Location source = move->GetSource();
5383 Location destination = move->GetDestination();
5384
5385 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005386 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005387 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005388 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005389 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005390 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005391 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005392 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5393 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005394 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005395 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005396 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005397 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5398 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005399 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005400 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5401 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5402 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005403 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005404 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005405 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005406 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005407 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005408 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005409 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005410 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005411 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005412 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005413 }
5414}
5415
5416
// Saves a core register on the stack so it can be used as a scratch
// register by the parallel move resolver.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5420
5421
// Restores a core register previously saved by SpillScratch().
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5425
// Emits a class-initialization check on `class_reg`: jumps to `slow_path`
// if the class status is below kStatusInitialized, and binds the slow
// path's exit label as the fall-through continuation.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5434
// Maps a desired HLoadClass load kind to one this code generator supports,
// asserting the configuration (PIC / JIT) each kind requires.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      // We prefer the always-available RIP-relative address for the x86-64 boot image.
      return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
      break;
    case HLoadClass::LoadKind::kDexCacheAddress:
      // Absolute dex cache addresses are only valid under the JIT.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCachePcRelative:
      // PC-relative dex cache access is an AOT-only scheme.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      break;
  }
  return desired_class_load_kind;
}
5460
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    // Access checks are done through a runtime call; set up the standard
    // runtime-call locations with the result in RAX.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Location::RegisterLocation(RAX),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  // Only these two kinds read the current method, passed as input 0.
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}
5488
// Emits code materializing the mirror::Class for `cls` into the output
// register, according to the load kind chosen at locations-building time.
// May attach a slow path for resolution and/or class initialization.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Resolve the type through the runtime, which also verifies access.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          requires_read_barrier);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(!requires_read_barrier);
      // Emit a RIP-relative lea over a dummy offset; the linker patches it.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!requires_read_barrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // /* GcRoot<mirror::Class> */ out = *address
      if (IsUint<32>(cls->GetAddress())) {
        // The dex cache slot address fits in 32 bits: load it directly.
        Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                address,
                                /* fixup_label */ nullptr,
                                requires_read_barrier);
      } else {
        // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
        __ movq(out, Immediate(cls->GetAddress()));
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                Address(out, 0),
                                /* fixup_label */ nullptr,
                                requires_read_barrier);
      }
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
          /* fixup_label */ nullptr,
          requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    // A single slow path handles both resolution (null dex cache entry) and
    // class initialization.
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5600
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  // Always reserves a slow path: initialization may have to run.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    // When the check has users, it forwards its class input as its output.
    locations->SetOut(Location::SameAsFirstInput());
  }
}
5609
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
5618
// Maps a desired HLoadString load kind to one this code generator supports,
// asserting the configuration (PIC / JIT) each kind requires.
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      // We prefer the always-available RIP-relative address for the x86-64 boot image.
      return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kBssEntry:
      // .bss string entries are an AOT-only scheme.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      // The JIT root table only exists under the JIT.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
  }
  return desired_string_load_kind;
}
5642
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  // kDexCacheViaMethod resolves via a main runtime call; other kinds that
  // need an environment only call out on the slow path.
  LocationSummary::CallKind call_kind = load->NeedsEnvironment()
      ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
          ? LocationSummary::kCallOnMainOnly
          : LocationSummary::kCallOnSlowPath)
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and/or marking to save everything.
        // Custom calling convention: RAX serves as both input and output.
        RegisterSet caller_saves = RegisterSet::Empty();
        caller_saves.Add(Location::RegisterLocation(RAX));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
5667
Nicolas Geoffrayac3ebc32016-10-05 13:13:50 +01005668Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file, uint32_t dex_index) {
5669 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
5670 // Add a patch entry and return the label.
5671 jit_string_patches_.emplace_back(dex_file, dex_index);
5672 PatchInfo<Label>* info = &jit_string_patches_.back();
5673 return &info->label;
5674}
5675
// Emits code materializing the mirror::String for `load` into the output
// register. Fast kinds return early; the fall-through resolves through the
// runtime.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // RIP-relative lea over a dummy offset; the linker patches it.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK_NE(load->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier);
      // A null .bss entry means the string is not resolved yet: go to the
      // slow path, which calls the runtime.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootStringPatch(load->GetDexFile(), load->GetStringIndex());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex()));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5728
// Returns the absolute (non-RIP-relative) address of the current thread's
// pending-exception slot; callers access it through the gs segment prefix.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip */ true);
}
5733
void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
  // Pure TLS read; only an output register is needed.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
5739
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  // Load the pending exception reference from thread-local storage (gs-based).
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
5743
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, no outputs, no call.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5747
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Overwrite the thread-local pending-exception slot with null.
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5751
void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  // The exception object goes in the first runtime-call argument register.
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
5758
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Deliver the exception through the runtime.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5763
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005764static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5765 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005766 // We need a temporary for holding the iftable length.
5767 return true;
5768 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005769 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005770 !kUseBakerReadBarrier &&
5771 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005772 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5773 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5774}
5775
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005776static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5777 return kEmitCompilerReadBarrier &&
5778 !kUseBakerReadBarrier &&
5779 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5780 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5781 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5782}
5783
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      // These kinds only need a slow path when read barriers are in use.
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
5818
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005819void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005820 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005821 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005822 Location obj_loc = locations->InAt(0);
5823 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005824 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005825 Location out_loc = locations->Out();
5826 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005827 Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005828 locations->GetTemp(0) :
5829 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005830 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005831 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5832 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5833 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005834 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005835 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005836
5837 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005838 // Avoid null check if we know obj is not null.
5839 if (instruction->MustDoNullCheck()) {
5840 __ testl(obj, obj);
5841 __ j(kEqual, &zero);
5842 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005843
Roland Levillain0d5a2812015-11-13 10:07:31 +00005844 // /* HeapReference<Class> */ out = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005845 GenerateReferenceLoadTwoRegisters(instruction,
5846 out_loc,
5847 obj_loc,
5848 class_offset,
5849 kEmitCompilerReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005850
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005851 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005852 case TypeCheckKind::kExactCheck: {
5853 if (cls.IsRegister()) {
5854 __ cmpl(out, cls.AsRegister<CpuRegister>());
5855 } else {
5856 DCHECK(cls.IsStackSlot()) << cls;
5857 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5858 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005859 if (zero.IsLinked()) {
5860 // Classes must be equal for the instanceof to succeed.
5861 __ j(kNotEqual, &zero);
5862 __ movl(out, Immediate(1));
5863 __ jmp(&done);
5864 } else {
5865 __ setcc(kEqual, out);
5866 // setcc only sets the low byte.
5867 __ andl(out, Immediate(1));
5868 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005869 break;
5870 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005871
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005872 case TypeCheckKind::kAbstractClassCheck: {
5873 // If the class is abstract, we eagerly fetch the super class of the
5874 // object to avoid doing a comparison we know will fail.
5875 NearLabel loop, success;
5876 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005877 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005878 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005879 __ testl(out, out);
5880 // If `out` is null, we use it for the result, and jump to `done`.
5881 __ j(kEqual, &done);
5882 if (cls.IsRegister()) {
5883 __ cmpl(out, cls.AsRegister<CpuRegister>());
5884 } else {
5885 DCHECK(cls.IsStackSlot()) << cls;
5886 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5887 }
5888 __ j(kNotEqual, &loop);
5889 __ movl(out, Immediate(1));
5890 if (zero.IsLinked()) {
5891 __ jmp(&done);
5892 }
5893 break;
5894 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005895
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005896 case TypeCheckKind::kClassHierarchyCheck: {
5897 // Walk over the class hierarchy to find a match.
5898 NearLabel loop, success;
5899 __ Bind(&loop);
5900 if (cls.IsRegister()) {
5901 __ cmpl(out, cls.AsRegister<CpuRegister>());
5902 } else {
5903 DCHECK(cls.IsStackSlot()) << cls;
5904 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5905 }
5906 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005907 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005908 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005909 __ testl(out, out);
5910 __ j(kNotEqual, &loop);
5911 // If `out` is null, we use it for the result, and jump to `done`.
5912 __ jmp(&done);
5913 __ Bind(&success);
5914 __ movl(out, Immediate(1));
5915 if (zero.IsLinked()) {
5916 __ jmp(&done);
5917 }
5918 break;
5919 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005920
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005921 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005922 // Do an exact check.
5923 NearLabel exact_check;
5924 if (cls.IsRegister()) {
5925 __ cmpl(out, cls.AsRegister<CpuRegister>());
5926 } else {
5927 DCHECK(cls.IsStackSlot()) << cls;
5928 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5929 }
5930 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005931 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005932 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005933 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005934 __ testl(out, out);
5935 // If `out` is null, we use it for the result, and jump to `done`.
5936 __ j(kEqual, &done);
5937 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5938 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005939 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005940 __ movl(out, Immediate(1));
5941 __ jmp(&done);
5942 break;
5943 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005944
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005945 case TypeCheckKind::kArrayCheck: {
5946 if (cls.IsRegister()) {
5947 __ cmpl(out, cls.AsRegister<CpuRegister>());
5948 } else {
5949 DCHECK(cls.IsStackSlot()) << cls;
5950 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5951 }
5952 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005953 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5954 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005955 codegen_->AddSlowPath(slow_path);
5956 __ j(kNotEqual, slow_path->GetEntryLabel());
5957 __ movl(out, Immediate(1));
5958 if (zero.IsLinked()) {
5959 __ jmp(&done);
5960 }
5961 break;
5962 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005963
Calin Juravle98893e12015-10-02 21:05:03 +01005964 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005965 case TypeCheckKind::kInterfaceCheck: {
5966 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005967 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005968 // cases.
5969 //
5970 // We cannot directly call the InstanceofNonTrivial runtime
5971 // entry point without resorting to a type checking slow path
5972 // here (i.e. by calling InvokeRuntime directly), as it would
5973 // require to assign fixed registers for the inputs of this
5974 // HInstanceOf instruction (following the runtime calling
5975 // convention), which might be cluttered by the potential first
5976 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005977 //
5978 // TODO: Introduce a new runtime entry point taking the object
5979 // to test (instead of its class) as argument, and let it deal
5980 // with the read barrier issues. This will let us refactor this
5981 // case of the `switch` code as it was previously (with a direct
5982 // call to the runtime not using a type checking slow path).
5983 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005984 DCHECK(locations->OnlyCallsOnSlowPath());
5985 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5986 /* is_fatal */ false);
5987 codegen_->AddSlowPath(slow_path);
5988 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005989 if (zero.IsLinked()) {
5990 __ jmp(&done);
5991 }
5992 break;
5993 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005994 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005995
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005996 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005997 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005998 __ xorl(out, out);
5999 }
6000
6001 if (done.IsLinked()) {
6002 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006003 }
6004
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006005 if (slow_path != nullptr) {
6006 __ Bind(slow_path->GetExitLabel());
6007 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006008}
6009
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006010static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006011 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006012 case TypeCheckKind::kExactCheck:
6013 case TypeCheckKind::kAbstractClassCheck:
6014 case TypeCheckKind::kClassHierarchyCheck:
6015 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006016 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006017 case TypeCheckKind::kInterfaceCheck:
6018 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006019 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006020 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006021 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006022 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006023 LOG(FATAL) << "Unreachable";
6024 UNREACHABLE();
6025}
6026
6027void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6028 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6029 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6030 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6031 LocationSummary::CallKind call_kind = is_fatal_slow_path
6032 ? LocationSummary::kNoCall
6033 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006034 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6035 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006036 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6037 // Require a register for the interface check since there is a loop that compares the class to
6038 // a memory address.
6039 locations->SetInAt(1, Location::RequiresRegister());
6040 } else {
6041 locations->SetInAt(1, Location::Any());
6042 }
6043
Roland Levillain0d5a2812015-11-13 10:07:31 +00006044 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6045 locations->AddTemp(Location::RequiresRegister());
6046 // When read barriers are enabled, we need an additional temporary
6047 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006048 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006049 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006050 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006051}
6052
6053void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006054 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006055 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006056 Location obj_loc = locations->InAt(0);
6057 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006058 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006059 Location temp_loc = locations->GetTemp(0);
6060 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006061 Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006062 locations->GetTemp(1) :
6063 Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006064 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6065 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6066 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6067 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6068 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6069 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006070 const uint32_t object_array_data_offset =
6071 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006072
Roland Levillain0d5a2812015-11-13 10:07:31 +00006073 bool is_type_check_slow_path_fatal =
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006074 IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006075 SlowPathCode* type_check_slow_path =
6076 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6077 is_type_check_slow_path_fatal);
6078 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006079
Roland Levillain0d5a2812015-11-13 10:07:31 +00006080 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006081 case TypeCheckKind::kExactCheck:
6082 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006083 NearLabel done;
6084 // Avoid null check if we know obj is not null.
6085 if (instruction->MustDoNullCheck()) {
6086 __ testl(obj, obj);
6087 __ j(kEqual, &done);
6088 }
6089
6090 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006091 GenerateReferenceLoadTwoRegisters(instruction,
6092 temp_loc,
6093 obj_loc,
6094 class_offset,
6095 kEmitCompilerReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006096 if (cls.IsRegister()) {
6097 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6098 } else {
6099 DCHECK(cls.IsStackSlot()) << cls;
6100 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6101 }
6102 // Jump to slow path for throwing the exception or doing a
6103 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006104 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006105 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006106 break;
6107 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006108
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006109 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006110 NearLabel done;
6111 // Avoid null check if we know obj is not null.
6112 if (instruction->MustDoNullCheck()) {
6113 __ testl(obj, obj);
6114 __ j(kEqual, &done);
6115 }
6116
6117 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006118 GenerateReferenceLoadTwoRegisters(instruction,
6119 temp_loc,
6120 obj_loc,
6121 class_offset,
6122 kEmitCompilerReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006123 // If the class is abstract, we eagerly fetch the super class of the
6124 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006125 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006126 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006127 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006128 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006129
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006130 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6131 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006132 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006133 // Otherwise, compare the classes.
6134 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006135 if (cls.IsRegister()) {
6136 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6137 } else {
6138 DCHECK(cls.IsStackSlot()) << cls;
6139 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6140 }
6141 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00006142 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006143 break;
6144 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006145
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006146 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006147 NearLabel done;
6148 // Avoid null check if we know obj is not null.
6149 if (instruction->MustDoNullCheck()) {
6150 __ testl(obj, obj);
6151 __ j(kEqual, &done);
6152 }
6153
6154 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006155 GenerateReferenceLoadTwoRegisters(instruction,
6156 temp_loc,
6157 obj_loc,
6158 class_offset,
6159 kEmitCompilerReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006160 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006161 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006162 __ Bind(&loop);
6163 if (cls.IsRegister()) {
6164 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6165 } else {
6166 DCHECK(cls.IsStackSlot()) << cls;
6167 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6168 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006169 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006170
Roland Levillain0d5a2812015-11-13 10:07:31 +00006171 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006172 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006173
6174 // If the class reference currently in `temp` is not null, jump
6175 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006176 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006177 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006178 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006179 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006180 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006181 break;
6182 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006183
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006184 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006185 // We cannot use a NearLabel here, as its range might be too
6186 // short in some cases when read barriers are enabled. This has
6187 // been observed for instance when the code emitted for this
6188 // case uses high x86-64 registers (R8-R15).
6189 Label done;
6190 // Avoid null check if we know obj is not null.
6191 if (instruction->MustDoNullCheck()) {
6192 __ testl(obj, obj);
6193 __ j(kEqual, &done);
6194 }
6195
6196 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006197 GenerateReferenceLoadTwoRegisters(instruction,
6198 temp_loc,
6199 obj_loc,
6200 class_offset,
6201 kEmitCompilerReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006202 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006203 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006204 if (cls.IsRegister()) {
6205 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6206 } else {
6207 DCHECK(cls.IsStackSlot()) << cls;
6208 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6209 }
6210 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006211
6212 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006213 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006214 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006215
6216 // If the component type is not null (i.e. the object is indeed
6217 // an array), jump to label `check_non_primitive_component_type`
6218 // to further check that this component type is not a primitive
6219 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006220 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006221 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006222 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006223 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006224 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006225 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006226 break;
6227 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006228
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006229 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006230 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006231 //
6232 // We cannot directly call the CheckCast runtime entry point
6233 // without resorting to a type checking slow path here (i.e. by
6234 // calling InvokeRuntime directly), as it would require to
6235 // assign fixed registers for the inputs of this HInstanceOf
6236 // instruction (following the runtime calling convention), which
6237 // might be cluttered by the potential first read barrier
6238 // emission at the beginning of this method.
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006239
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006240 NearLabel done;
6241 // Avoid null check if we know obj is not null.
6242 if (instruction->MustDoNullCheck()) {
6243 __ testl(obj, obj);
6244 __ j(kEqual, &done);
6245 }
6246 __ jmp(type_check_slow_path->GetEntryLabel());
6247 __ Bind(&done);
6248 break;
6249 }
6250
6251 case TypeCheckKind::kInterfaceCheck:
6252 NearLabel done;
6253
6254 // Avoid null check if we know obj is not null.
6255 if (instruction->MustDoNullCheck()) {
6256 __ testl(obj, obj);
6257 __ j(kEqual, &done);
6258 }
6259
6260 // Fast path for the interface check. We always go slow path for heap poisoning since
6261 // unpoisoning cls would require an extra temp.
6262 if (!kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006263 // Try to avoid read barriers to improve the fast path. We can not get false positives by
6264 // doing this.
6265 // /* HeapReference<Class> */ temp = obj->klass_
6266 GenerateReferenceLoadTwoRegisters(instruction,
6267 temp_loc,
6268 obj_loc,
6269 class_offset,
6270 /*emit_read_barrier*/ false);
6271
6272 // /* HeapReference<Class> */ temp = temp->iftable_
6273 GenerateReferenceLoadTwoRegisters(instruction,
6274 temp_loc,
6275 temp_loc,
6276 iftable_offset,
6277 /*emit_read_barrier*/ false);
6278 NearLabel is_null;
6279 // Null iftable means it is empty.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006280 __ testl(temp, temp);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006281 __ j(kZero, &is_null);
6282
6283 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006284 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006285
6286 NearLabel start_loop;
6287 __ Bind(&start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006288 __ cmpl(cls.AsRegister<CpuRegister>(), Address(temp, object_array_data_offset));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006289 __ j(kEqual, &done); // Return if same class.
6290 // Go to next interface.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006291 __ addl(temp, Immediate(2 * kHeapReferenceSize));
6292 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006293 __ j(kNotZero, &start_loop);
6294 __ Bind(&is_null);
6295 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006296 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00006297 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006298 break;
6299 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006300
Roland Levillain0d5a2812015-11-13 10:07:31 +00006301 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006302}
6303
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006304void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6305 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006306 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006307 InvokeRuntimeCallingConvention calling_convention;
6308 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6309}
6310
6311void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006312 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006313 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006314 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006315 if (instruction->IsEnter()) {
6316 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6317 } else {
6318 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6319 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006320}
6321
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006322void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6323void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6324void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6325
6326void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6327 LocationSummary* locations =
6328 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6329 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6330 || instruction->GetResultType() == Primitive::kPrimLong);
6331 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006332 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006333 locations->SetOut(Location::SameAsFirstInput());
6334}
6335
// Code generation for HAnd is shared with Or/Xor; see HandleBitwiseOperation.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

// Code generation for HOr is shared with And/Xor; see HandleBitwiseOperation.
void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

// Code generation for HXor is shared with And/Or; see HandleBitwiseOperation.
void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6347
6348void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6349 LocationSummary* locations = instruction->GetLocations();
6350 Location first = locations->InAt(0);
6351 Location second = locations->InAt(1);
6352 DCHECK(first.Equals(locations->Out()));
6353
6354 if (instruction->GetResultType() == Primitive::kPrimInt) {
6355 if (second.IsRegister()) {
6356 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006357 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006358 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006359 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006360 } else {
6361 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006362 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006363 }
6364 } else if (second.IsConstant()) {
6365 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6366 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006367 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006368 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006369 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006370 } else {
6371 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006372 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006373 }
6374 } else {
6375 Address address(CpuRegister(RSP), second.GetStackIndex());
6376 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006377 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006378 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006379 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006380 } else {
6381 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006382 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006383 }
6384 }
6385 } else {
6386 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006387 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6388 bool second_is_constant = false;
6389 int64_t value = 0;
6390 if (second.IsConstant()) {
6391 second_is_constant = true;
6392 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006393 }
Mark Mendell40741f32015-04-20 22:10:34 -04006394 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006395
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006396 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006397 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006398 if (is_int32_value) {
6399 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6400 } else {
6401 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6402 }
6403 } else if (second.IsDoubleStackSlot()) {
6404 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006405 } else {
6406 __ andq(first_reg, second.AsRegister<CpuRegister>());
6407 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006408 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006409 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006410 if (is_int32_value) {
6411 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6412 } else {
6413 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6414 }
6415 } else if (second.IsDoubleStackSlot()) {
6416 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006417 } else {
6418 __ orq(first_reg, second.AsRegister<CpuRegister>());
6419 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006420 } else {
6421 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006422 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006423 if (is_int32_value) {
6424 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6425 } else {
6426 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6427 }
6428 } else if (second.IsDoubleStackSlot()) {
6429 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006430 } else {
6431 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6432 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006433 }
6434 }
6435}
6436
// Generates a heap reference load where the base register doubles as the
// destination: out <- *(out + offset). Emits whatever read barrier code
// the current configuration requires.
//
// `maybe_temp` must hold a register when the non-Baker slow-path read
// barrier is in use (the original value of `out` has to be preserved for
// the barrier); it is ignored otherwise.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                      Location out,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6466
// Generates a heap reference load with distinct base and destination
// registers: out <- *(obj + offset). Unlike the one-register variant the
// base object register is left intact. `emit_read_barrier` selects
// whether read barrier code is emitted for this particular load.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                       Location out,
                                                                       Location obj,
                                                                       uint32_t offset,
                                                                       bool emit_read_barrier) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (emit_read_barrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6493
// Loads a GC root: root <- *address. When `fixup_label` is non-null it is
// bound immediately after the instruction referencing `address`, so that
// the instruction can be patched later (PC-relative addressing).
// `requires_read_barrier` selects whether read barrier code is emitted.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             const Address& address,
                                                             Label* fixup_label,
                                                             bool requires_read_barrier) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (requires_read_barrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The 32-bit movl above is only correct if a GC root really is a
      // 32-bit compressed reference; check that at compile time.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Only take the mark slow path while the GC's marking flag (read
      // thread-locally via the gs segment) is set.
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6556
6557void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6558 Location ref,
6559 CpuRegister obj,
6560 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006561 bool needs_null_check) {
6562 DCHECK(kEmitCompilerReadBarrier);
6563 DCHECK(kUseBakerReadBarrier);
6564
6565 // /* HeapReference<Object> */ ref = *(obj + offset)
6566 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006567 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006568}
6569
6570void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6571 Location ref,
6572 CpuRegister obj,
6573 uint32_t data_offset,
6574 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006575 bool needs_null_check) {
6576 DCHECK(kEmitCompilerReadBarrier);
6577 DCHECK(kUseBakerReadBarrier);
6578
Roland Levillain3d312422016-06-23 13:53:42 +01006579 static_assert(
6580 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6581 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006582 // /* HeapReference<Object> */ ref =
6583 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006584 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006585 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006586}
6587
// Core of the Baker read barrier fast path: loads *src into `ref` and,
// when the object's lock word says the object is gray, branches to a
// slow path that marks (and, if `always_update_field`, also updates) the
// reference. `temp1`/`temp2` are only required when `always_update_field`
// is true.
//
// CAUTION: the gray-bit test (testb) is emitted *before* the reference
// load and the conditional branch *after* it; nothing scheduled in
// between may clobber the CPU flags.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to `obj`, so it can double as
    // the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6671
// Emits an unconditional slow-path read barrier on the heap reference
// `ref` (which was loaded from `obj` at `offset`, with an optional
// `index` for array-style accesses); the result is left in `out`.
void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // The barrier always runs: jump straight into the slow path and
  // continue at its exit label.
  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6698
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006699void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6700 Location out,
6701 Location ref,
6702 Location obj,
6703 uint32_t offset,
6704 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006705 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006706 // Baker's read barriers shall be handled by the fast path
6707 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6708 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006709 // If heap poisoning is enabled, unpoisoning will be taken care of
6710 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006711 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006712 } else if (kPoisonHeapReferences) {
6713 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6714 }
6715}
6716
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006717void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6718 Location out,
6719 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006720 DCHECK(kEmitCompilerReadBarrier);
6721
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006722 // Insert a slow path based read barrier *after* the GC root load.
6723 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006724 // Note that GC roots are not affected by heap poisoning, so we do
6725 // not need to do anything special for this here.
6726 SlowPathCode* slow_path =
6727 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6728 AddSlowPath(slow_path);
6729
Roland Levillain0d5a2812015-11-13 10:07:31 +00006730 __ jmp(slow_path->GetEntryLabel());
6731 __ Bind(slow_path->GetExitLabel());
6732}
6733
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor means an HBoundType survived until code
  // generation, which indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
6738
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor means an HBoundType survived until code
  // generation, which indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
6743
Mark Mendellfe57faa2015-09-18 09:26:15 -04006744// Simple implementation of packed switch - generate cascaded compare/jumps.
6745void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6746 LocationSummary* locations =
6747 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6748 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006749 locations->AddTemp(Location::RequiresRegister());
6750 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006751}
6752
// Emits code for a packed switch. Switches with at most
// kPackedSwitchJumpTableThreshold entries become a cascade of
// compare/jump pairs (two cases handled per comparison); larger switches
// use an indirect jump through a jump table placed in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero bias: values below `lower_bound` fall through to the
      // default block; equality hits the first case.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // Zero bias: an unsigned below-comparison covers the "< 0" case too.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each cmpl settles two adjacent cases: "<" selects case `index`,
    // "==" selects case `index + 1`.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table form below.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? (unsigned compare also rejects negatives)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6833
Aart Bikc5d47542016-01-27 17:00:35 -08006834void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6835 if (value == 0) {
6836 __ xorl(dest, dest);
6837 } else {
6838 __ movl(dest, Immediate(value));
6839 }
6840}
6841
Mark Mendell92e83bf2015-05-07 11:25:03 -04006842void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6843 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006844 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006845 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006846 } else if (IsUint<32>(value)) {
6847 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006848 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6849 } else {
6850 __ movq(dest, Immediate(value));
6851 }
6852}
6853
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006854void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6855 if (value == 0) {
6856 __ xorps(dest, dest);
6857 } else {
6858 __ movss(dest, LiteralInt32Address(value));
6859 }
6860}
6861
6862void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6863 if (value == 0) {
6864 __ xorpd(dest, dest);
6865 } else {
6866 __ movsd(dest, LiteralInt64Address(value));
6867 }
6868}
6869
6870void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6871 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6872}
6873
6874void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6875 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6876}
6877
Aart Bika19616e2016-02-01 18:57:58 -08006878void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6879 if (value == 0) {
6880 __ testl(dest, dest);
6881 } else {
6882 __ cmpl(dest, Immediate(value));
6883 }
6884}
6885
6886void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6887 if (IsInt<32>(value)) {
6888 if (value == 0) {
6889 __ testq(dest, dest);
6890 } else {
6891 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6892 }
6893 } else {
6894 // Value won't fit in an int.
6895 __ cmpq(dest, LiteralInt64Address(value));
6896 }
6897}
6898
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006899void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6900 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006901 GenerateIntCompare(lhs_reg, rhs);
6902}
6903
6904void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006905 if (rhs.IsConstant()) {
6906 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006907 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006908 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006909 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006910 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006911 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006912 }
6913}
6914
6915void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6916 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6917 if (rhs.IsConstant()) {
6918 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6919 Compare64BitValue(lhs_reg, value);
6920 } else if (rhs.IsDoubleStackSlot()) {
6921 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6922 } else {
6923 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6924 }
6925}
6926
6927Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6928 Location index,
6929 ScaleFactor scale,
6930 uint32_t data_offset) {
6931 return index.IsConstant() ?
6932 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6933 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6934}
6935
Mark Mendellcfa410b2015-05-25 16:02:44 -04006936void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6937 DCHECK(dest.IsDoubleStackSlot());
6938 if (IsInt<32>(value)) {
6939 // Can move directly as an int32 constant.
6940 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6941 Immediate(static_cast<int32_t>(value)));
6942 } else {
6943 Load64BitValue(CpuRegister(TMP), value);
6944 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6945 }
6946}
6947
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * The constant area's final position is only known at Finalize() time,
 * so RIP-relative references to it are recorded as fixups and their
 * 32-bit displacements patched in afterwards.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Lets subclasses (e.g. jump tables) set their offset once it is known.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction.  We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
6975
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
      // The real offset is set by CreateJumpTable() below.

  // Emits this switch's jump table into the constant area and records
  // its offset for the RIP fixup.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The switch instruction this jump table belongs to.
  const HPackedSwitch* switch_instr_;
};
7012
Mark Mendellf55c3e02015-03-26 21:07:46 -04007013void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7014 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007015 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007016 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7017 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007018 assembler->Align(4, 0);
7019 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007020
7021 // Populate any jump tables.
7022 for (auto jump_table : fixups_to_jump_tables_) {
7023 jump_table->CreateJumpTable();
7024 }
7025
7026 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007027 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007028 }
7029
7030 // And finish up.
7031 CodeGenerator::Finalize(allocator);
7032}
7033
Mark Mendellf55c3e02015-03-26 21:07:46 -04007034Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7035 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7036 return Address::RIP(fixup);
7037}
7038
7039Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7040 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7041 return Address::RIP(fixup);
7042}
7043
7044Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7045 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7046 return Address::RIP(fixup);
7047}
7048
7049Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7050 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7051 return Address::RIP(fixup);
7052}
7053
Andreas Gampe85b62f22015-09-09 13:15:38 -07007054// TODO: trg as memory.
7055void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7056 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007057 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007058 return;
7059 }
7060
7061 DCHECK_NE(type, Primitive::kPrimVoid);
7062
7063 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7064 if (trg.Equals(return_loc)) {
7065 return;
7066 }
7067
7068 // Let the parallel move resolver take care of all of this.
7069 HParallelMove parallel_move(GetGraph()->GetArena());
7070 parallel_move.AddMove(return_loc, trg, type, nullptr);
7071 GetMoveResolver()->EmitNativeCode(&parallel_move);
7072}
7073
Mark Mendell9c86b482015-09-18 13:36:07 -04007074Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7075 // Create a fixup to be used to create and address the jump table.
7076 JumpTableRIPFixup* table_fixup =
7077 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7078
7079 // We have to populate the jump tables.
7080 fixups_to_jump_tables_.push_back(table_fixup);
7081 return Address::RIP(table_fixup);
7082}
7083
Mark Mendellea5af682015-10-22 17:35:49 -04007084void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7085 const Address& addr_high,
7086 int64_t v,
7087 HInstruction* instruction) {
7088 if (IsInt<32>(v)) {
7089 int32_t v_32 = v;
7090 __ movq(addr_low, Immediate(v_32));
7091 MaybeRecordImplicitNullCheck(instruction);
7092 } else {
7093 // Didn't fit in a register. Do it in pieces.
7094 int32_t low_v = Low32Bits(v);
7095 int32_t high_v = High32Bits(v);
7096 __ movl(addr_low, Immediate(low_v));
7097 MaybeRecordImplicitNullCheck(instruction);
7098 __ movl(addr_high, Immediate(high_v));
7099 }
7100}
7101
// Patch JIT-compiled code so each recorded string-literal load reads its
// GcRoot slot in the JIT roots table at `roots_data`.
void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const PatchInfo<Label>& info : jit_string_patches_) {
    // Look up the root-table index recorded for this (dex file, string index).
    const auto& it = jit_string_roots_.find(StringReference(&info.dex_file, info.index));
    DCHECK(it != jit_string_roots_.end());
    size_t index_in_table = it->second;
    // The label marks the end of the instruction; back up to its literal field.
    uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    // Absolute address of the GcRoot slot for this string in the roots table.
    uintptr_t address =
        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
    // The literal field inside the instruction stream is not 4-byte aligned,
    // so write it through an explicitly unaligned 32-bit pointer type.
    typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
    reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
        dchecked_integral_cast<uint32_t>(address);
  }
}
7115
Roland Levillain4d027112015-07-01 15:41:14 +01007116#undef __
7117
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007118} // namespace x86_64
7119} // namespace art