blob: 7dfc736d9c69876a287a5721637a2a550e70f79f [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Offset from SP at which the current ArtMethod* is stored in the frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (first integer argument register, RDI).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved registers of the ART x86-64 calling convention; saved/restored in the prologue
// and epilogue when used.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition bit (0x400) of the x87 FPU status word.
// NOTE(review): presumably tested after fprem-style instructions to detect an
// incomplete partial remainder — confirm at the use site.
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
// `__` expands to the x86-64 assembler of the `codegen` variable in scope.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
// Byte offset of a quick runtime entry point within the Thread object.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
// Slow path for the `dividend / -1` (and `% -1`) corner case of HDiv/HRem.
// x86-64 `idiv` raises #DE for INT_MIN / -1 (the quotient overflows), so the
// code generator branches here instead of executing `idiv`: the quotient of
// x / -1 is -x, and the remainder of x % -1 is always 0.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `reg` holds the dividend on entry and receives the result.
  // `is_div` selects division (negate) vs. remainder (zero).
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        // Quotient of x / -1 is -x.
        __ negl(cpu_reg_);
      } else {
        // Remainder of x % -1 is 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xor suffices for the 64-bit case: writing a 32-bit register
        // zero-extends into the full 64-bit register and has a shorter
        // encoding than xorq.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;   // Dividend on entry, result on exit.
  const Primitive::Type type_;  // kPrimInt or kPrimLong.
  const bool is_div_;           // True for div, false for rem.
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
136
// Slow path for HSuspendCheck: calls the runtime's test-suspend entry point
// so the thread can honor a pending suspend request.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // `successor` is the block to resume at after the runtime call, or null to
  // jump back to `return_label_` right after the suspend check.
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      // Resume immediately after the suspend check.
      __ jmp(GetReturnLabel());
    } else {
      // Resume at the designated successor block.
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when there is no explicit successor.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  // Null when control returns to `return_label_` instead of a block.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
171
// Slow path for HBoundsCheck: calls the runtime to throw an
// index-out-of-bounds exception (array variant, or the string variant for
// String.charAt) with the offending index and the length as arguments.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // The length was folded into its use and never materialized in a
      // register. Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression) {
        // With string compression the low bit of the length field is the
        // compression flag; shift it out to get the character count.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both entry points take (index, length) as int32_t and do not return.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
230
Andreas Gampe85b62f22015-09-09 13:15:38 -0700231class LoadClassSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100232 public:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000233 LoadClassSlowPathX86_64(HLoadClass* cls,
234 HInstruction* at,
235 uint32_t dex_pc,
236 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000237 : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000238 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
239 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100240
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000241 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000242 LocationSummary* locations = at_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000243 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100244 __ Bind(GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100245
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000246 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000247
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100248 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -0800249 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
250 Immediate(cls_->GetTypeIndex().index_));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100251 x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000252 at_,
253 dex_pc_,
254 this);
Roland Levillain888d0672015-11-23 18:53:50 +0000255 if (do_clinit_) {
256 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
257 } else {
258 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
259 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100260
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000261 Location out = locations->Out();
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000262 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000263 if (out.IsValid()) {
264 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Roland Levillain0d5a2812015-11-13 10:07:31 +0000265 x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000266 }
267
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000268 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100269 __ jmp(GetExitLabel());
270 }
271
Alexandre Rames9931f312015-06-19 14:47:01 +0100272 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }
273
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100274 private:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000275 // The class this slow path will load.
276 HLoadClass* const cls_;
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100277
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000278 // The instruction where this slow path is happening.
279 // (Might be the load class or an initialization check).
280 HInstruction* const at_;
281
282 // The dex PC of `at_`.
283 const uint32_t dex_pc_;
284
285 // Whether to initialize the class.
286 const bool do_clinit_;
287
288 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100289};
290
// Slow path for HLoadString: calls the runtime to resolve the String, moves
// it to the output location, and stores it into the String's .bss entry so
// later executions take the fast path.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    // kDummy32BitOffset is a placeholder address; the label bound right after
    // the store is recorded by NewStringBssEntryPatch() so the real .bss
    // offset can be patched in later.
    __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
            locations->Out().AsRegister<CpuRegister>());
    Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
328
// Slow path shared by HInstanceOf and HCheckCast when the inline type check
// fails. For instanceof, the runtime returns the result (moved to the out
// location); for checkcast, the runtime throws on failure.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // When `is_fatal` is true the exception cannot be caught in this method,
  // so live registers are not saved/restored and control never returns.
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    // Only instanceof has an out location; it must not clash with saved regs.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The instanceof result comes back in RAX.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // True when the failing check cannot be caught in this method.
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
384
Andreas Gampe85b62f22015-09-09 13:15:38 -0700385class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700386 public:
Aart Bik42249c32016-01-07 15:33:50 -0800387 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000388 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700389
390 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000391 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700392 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100393 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000394 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700395 }
396
Alexandre Rames9931f312015-06-19 14:47:01 +0100397 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
398
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700400 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
401};
402
// Slow path for HArraySet of an object reference: delegates the store to the
// runtime's aput-object entry point (which handles the checks the fast path
// could not prove away).
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Shuffle (array, index, value) into the first three runtime argument
    // registers; the sources may overlap the destinations, hence a parallel
    // move resolved as a unit.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
443
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the register holding the reference to mark; it is used as both
  // input and output of the marking entry point.
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Read barriers are only inserted for this fixed set of instructions
    // (including intrinsified invokes that read references).
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // The entry point is selected by the register number of `ref`.
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
527
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `temp1` holds the pre-marking reference across the runtime call;
  // `temp2` is used to save/restore RAX around the LOCK CMPXCHG below.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it will be overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch register holding the old reference across the marking entrypoint call.
  const CpuRegister temp1_;
  // Scratch register used to save and restore RAX around the LOCK CMPXCHG.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
699
Roland Levillain0d5a2812015-11-13 10:07:31 +0000700// Slow path generating a read barrier for a heap reference.
701class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
702 public:
703 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
704 Location out,
705 Location ref,
706 Location obj,
707 uint32_t offset,
708 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000709 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000710 out_(out),
711 ref_(ref),
712 obj_(obj),
713 offset_(offset),
714 index_(index) {
715 DCHECK(kEmitCompilerReadBarrier);
716 // If `obj` is equal to `out` or `ref`, it means the initial
717 // object has been overwritten by (or after) the heap object
718 // reference load to be instrumented, e.g.:
719 //
720 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000721 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000722 //
723 // In that case, we have lost the information about the original
724 // object, and the emitted read barrier cannot work properly.
725 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
726 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
727}
728
729 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
730 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
731 LocationSummary* locations = instruction_->GetLocations();
732 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
733 DCHECK(locations->CanCall());
734 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100735 DCHECK(instruction_->IsInstanceFieldGet() ||
736 instruction_->IsStaticFieldGet() ||
737 instruction_->IsArrayGet() ||
738 instruction_->IsInstanceOf() ||
739 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100740 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000741 << "Unexpected instruction in read barrier for heap reference slow path: "
742 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000743
744 __ Bind(GetEntryLabel());
745 SaveLiveRegisters(codegen, locations);
746
747 // We may have to change the index's value, but as `index_` is a
748 // constant member (like other "inputs" of this slow path),
749 // introduce a copy of it, `index`.
750 Location index = index_;
751 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100752 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000753 if (instruction_->IsArrayGet()) {
754 // Compute real offset and store it in index_.
755 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
756 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
757 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
758 // We are about to change the value of `index_reg` (see the
759 // calls to art::x86_64::X86_64Assembler::shll and
760 // art::x86_64::X86_64Assembler::AddImmediate below), but it
761 // has not been saved by the previous call to
762 // art::SlowPathCode::SaveLiveRegisters, as it is a
763 // callee-save register --
764 // art::SlowPathCode::SaveLiveRegisters does not consider
765 // callee-save registers, as it has been designed with the
766 // assumption that callee-save registers are supposed to be
767 // handled by the called function. So, as a callee-save
768 // register, `index_reg` _would_ eventually be saved onto
769 // the stack, but it would be too late: we would have
770 // changed its value earlier. Therefore, we manually save
771 // it here into another freely available register,
772 // `free_reg`, chosen of course among the caller-save
773 // registers (as a callee-save `free_reg` register would
774 // exhibit the same problem).
775 //
776 // Note we could have requested a temporary register from
777 // the register allocator instead; but we prefer not to, as
778 // this is a slow path, and we know we can find a
779 // caller-save register that is available.
780 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
781 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
782 index_reg = free_reg;
783 index = Location::RegisterLocation(index_reg);
784 } else {
785 // The initial register stored in `index_` has already been
786 // saved in the call to art::SlowPathCode::SaveLiveRegisters
787 // (as it is not a callee-save register), so we can freely
788 // use it.
789 }
790 // Shifting the index value contained in `index_reg` by the
791 // scale factor (2) cannot overflow in practice, as the
792 // runtime is unable to allocate object arrays with a size
793 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
794 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
795 static_assert(
796 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
797 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
798 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
799 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100800 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
801 // intrinsics, `index_` is not shifted by a scale factor of 2
802 // (as in the case of ArrayGet), as it is actually an offset
803 // to an object field within an object.
804 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000805 DCHECK(instruction_->GetLocations()->Intrinsified());
806 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
807 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
808 << instruction_->AsInvoke()->GetIntrinsic();
809 DCHECK_EQ(offset_, 0U);
810 DCHECK(index_.IsRegister());
811 }
812 }
813
814 // We're moving two or three locations to locations that could
815 // overlap, so we need a parallel move resolver.
816 InvokeRuntimeCallingConvention calling_convention;
817 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
818 parallel_move.AddMove(ref_,
819 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
820 Primitive::kPrimNot,
821 nullptr);
822 parallel_move.AddMove(obj_,
823 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
824 Primitive::kPrimNot,
825 nullptr);
826 if (index.IsValid()) {
827 parallel_move.AddMove(index,
828 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
829 Primitive::kPrimInt,
830 nullptr);
831 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
832 } else {
833 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
834 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
835 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100836 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000837 instruction_,
838 instruction_->GetDexPc(),
839 this);
840 CheckEntrypointTypes<
841 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
842 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
843
844 RestoreLiveRegisters(codegen, locations);
845 __ jmp(GetExitLabel());
846 }
847
848 const char* GetDescription() const OVERRIDE {
849 return "ReadBarrierForHeapReferenceSlowPathX86_64";
850 }
851
852 private:
853 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
854 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
855 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
856 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
857 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
858 return static_cast<CpuRegister>(i);
859 }
860 }
861 // We shall never fail to find a free caller-save register, as
862 // there are more than two core caller-save registers on x86-64
863 // (meaning it is possible to find one which is different from
864 // `ref` and `obj`).
865 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
866 LOG(FATAL) << "Could not find a free caller-save register";
867 UNREACHABLE();
868 }
869
Roland Levillain0d5a2812015-11-13 10:07:31 +0000870 const Location out_;
871 const Location ref_;
872 const Location obj_;
873 const uint32_t offset_;
874 // An additional location containing an index to an array.
875 // Only used for HArrayGet and the UnsafeGetObject &
876 // UnsafeGetObjectVolatile intrinsics.
877 const Location index_;
878
879 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
880};
881
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the to-space reference; `root` is the GC root that was read.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC roots reach this slow path only via class and string loads.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime-call argument register, call the
    // kQuickReadBarrierForRootSlow entrypoint, and move its result (RAX)
    // into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // Destination of the read barrier result.
  const Location out_;
  // The GC root location handed to the runtime entrypoint.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
923
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100924#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100925// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
926#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100927
// Maps an HIR integer condition to the corresponding x86-64 condition code.
// kCondLT/LE/GT/GE use the signed codes; kCondB/BE/A/AE use the unsigned
// (below/above) codes. The switch is exhaustive, so control only reaches
// the trailing LOG(FATAL) on a corrupted enum value.
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
944
// Maps FP condition to x86_64 name.
// Floating-point comparisons use the unsigned-style (below/above) condition
// codes for ordering. The unsigned conditions kCondB/BE/A/AE fall through
// to `default` and hit the LOG(FATAL) below, as they do not apply to FP.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default:      break;  // should not happen
  };
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
959
Vladimir Markodc151b22015-10-15 18:02:30 +0100960HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
961 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100962 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Vladimir Markodc151b22015-10-15 18:02:30 +0100963 switch (desired_dispatch_info.code_ptr_location) {
964 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
965 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
966 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
967 return HInvokeStaticOrDirect::DispatchInfo {
968 desired_dispatch_info.method_load_kind,
969 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
970 desired_dispatch_info.method_load_data,
971 0u
972 };
973 default:
974 return desired_dispatch_info;
975 }
976}
977
// Materializes the callee ArtMethod* (or equivalent) for a static/direct
// invoke according to its method load kind. For every kind except
// kRecursive the callee ends up in `temp`; for kRecursive the current
// method's own location is returned. Returns the location holding the
// callee method.
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // The current method is the callee; reuse its input location.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Known method address: load it as a 64-bit immediate.
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Emit a placeholder immediate and record a method patch for the
      // linker to fill in the real address.
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                   invoke->GetTargetMethod().dex_method_index);
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Load from the dex cache array via a PC-relative address patched at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invokes do not carry the current method; reload it
        // from its stack slot into `temp`.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
1035
// Emits a static or direct call: first materializes the callee method via
// GenerateCalleeMethodStaticOrDirectCall, then emits the call instruction
// selected by the invoke's code pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: jump back to this method's own frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      // Record a relative call patch; the call target is rewritten at link time.
      relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                          invoke->GetTargetMethod().dex_method_index);
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
1068
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod* from the class's embedded vtable, and calls its quick
// compiled-code entrypoint.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The class load above may fault on a null receiver; record it as the
  // implicit null check for this invoke.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
1099
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001100void CodeGeneratorX86_64::RecordSimplePatch() {
1101 if (GetCompilerOptions().GetIncludePatchInformation()) {
1102 simple_patches_.emplace_back();
1103 __ Bind(&simple_patches_.back());
1104 }
1105}
1106
// Records a boot-image string patch for `load_string`, binding the patch
// label at the current assembler position (i.e. right after the
// instruction to be patched). Boot image compilation only.
void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
  DCHECK(GetCompilerOptions().IsBootImage());
  string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
  __ Bind(&string_patches_.back().label);
}
1112
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001113void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08001114 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001115 __ Bind(&type_patches_.back().label);
1116}
1117
Vladimir Markoaad75c62016-10-03 08:46:48 +00001118Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1119 DCHECK(!GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001120 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001121 return &string_patches_.back().label;
1122}
1123
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001124Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1125 uint32_t element_offset) {
1126 // Add a patch entry and return the label.
1127 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1128 return &pc_relative_dex_cache_patches_.back().label;
1129}
1130
Vladimir Markoaad75c62016-10-03 08:46:48 +00001131// The label points to the end of the "movl" or another instruction but the literal offset
1132// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1133constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1134
1135template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1136inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1137 const ArenaDeque<PatchInfo<Label>>& infos,
1138 ArenaVector<LinkerPatch>* linker_patches) {
1139 for (const PatchInfo<Label>& info : infos) {
1140 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1141 linker_patches->push_back(
1142 Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
1143 }
1144}
1145
// Flushes every patch recorded during code generation into `linker_patches` so the
// linker can fix up method calls, dex cache accesses, strings and types.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve once for all patch categories to avoid reallocations below.
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  for (const PatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
  }
  for (const PatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                              linker_patches);
  // Simple patches only record a position for the linker; they carry no target.
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  // String patches are .bss entries for app compilation but direct PC-relative
  // references when compiling the boot image.
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  } else {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  }
  // These are always PC-relative, see GetSupportedLoadClassKind().
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
}
1180
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001181void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001182 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001183}
1184
1185void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001186 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001187}
1188
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001189size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1190 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1191 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001192}
1193
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001194size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1195 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1196 return kX86_64WordSize;
1197}
1198
1199size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1200 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1201 return kX86_64WordSize;
1202}
1203
1204size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1205 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1206 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001207}
1208
// Emits a call to the given quick runtime entrypoint and, when the entrypoint can
// walk the stack, records a stack map at the call site.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  // Debug-check that this entrypoint may be invoked for this instruction/slow path.
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // The entrypoint lives at a fixed offset from the current Thread.
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1219
// Emits a runtime call for which no stack map is recorded; only valid for
// entrypoints that never need to walk the stack (validated in debug builds).
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1226
// Emits the actual call instruction: an absolute (non-RIP-relative) call through
// the GS segment, which addresses the current thread's entrypoint table.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1230
// No CPU register pairs are used on x86-64.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
// It is one past the last real CPU register and is treated as always allocated.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    // Core callee-saves plus the fake return address register.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      // All patch containers are arena-allocated alongside the graph.
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // The fake return address register is considered live for the whole method.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001267
// The instruction visitor shares the code generator's assembler; `codegen_` gives
// visitors access to labels, patches and helper emitters.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1273
David Brazdil58282f42016-01-14 12:45:10 +00001274void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001275 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001276 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001277
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001278 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001279 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001280}
1281
// Maps a core register to its DWARF encoding for CFI (call frame information).
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001285
// Maps a floating-point (XMM) register to its DWARF encoding for CFI.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1289
// Emits the method prologue: optional stack-overflow probe, callee-save spills,
// frame allocation, should-deoptimize flag init, and saving the current ArtMethod.
// CFI directives are interleaved so unwinders can describe every intermediate state.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Implicit check: touch memory below the stack limit; a fault here is
    // turned into a StackOverflowError, so record a PC for the fault handler.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated core callee-saves in reverse order so pops in the epilogue
  // restore them in declaration order.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame (everything not covered by the pushes above).
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // FP callee-saves are stored into the frame (no push instruction for XMM).
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), xmm_spill_location - kShouldDeoptimizeFlagSize),
            Immediate(0));
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1344
// Emits the method epilogue: restores FP then core callee-saves (mirroring the
// prologue), deallocates the frame and returns. CFI state is remembered/restored
// so the epilogue does not disturb unwind info for code emitted after it.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Undo the frame allocation done by GenerateFrameEntry.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves in forward order, the reverse of the prologue pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1375
// Binds the basic block's label to the current assembler position.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1379
// Emits a move between two arbitrary locations (register, FP register, stack slot,
// double stack slot, or constant source). A no-op when source equals destination.
// TMP is used as scratch for memory-to-memory moves.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // Bit-transfer from XMM to core register.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      // 32-bit stack slot load.
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // 64-bit stack slot load.
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Load the raw bit pattern of the FP constant into the XMM register.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 32-bit constants can be stored to memory directly as an immediate.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Memory-to-memory 32-bit move via the scratch register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      // movq has no 64-bit immediate form; the helper picks a valid sequence.
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Memory-to-memory 64-bit move via the scratch register.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1459
// Loads a 32-bit constant into a register location, sign-extended to 64 bits.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1464
// Generic move; the destination type is not needed on x86-64 since Move()
// derives the operation size from the locations themselves.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1469
1470void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1471 if (location.IsRegister()) {
1472 locations->AddTemp(location);
1473 } else {
1474 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1475 }
1476}
1477
// Shared lowering for HGoto and HTryBoundary: emits suspend checks where required
// and a jump to `successor` unless it is the fall-through block.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: emit the loop's suspend check combined with the branch.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // Method entry: emit the suspend check that immediately precedes this goto.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // No jump needed when the successor is laid out directly after this block.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1497
// HGoto takes no operands and needs no locations.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1501
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1505
// HTryBoundary takes no operands and needs no locations.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1509
1510void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1511 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1512 if (!successor->IsExitBlock()) {
1513 HandleGoto(try_boundary, successor);
1514 }
1515}
1516
// HExit takes no operands and needs no locations.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1520
// The exit block generates no code; returns are handled by HReturn/HReturnVoid.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1523
// Emits the branches following an FP compare (ucomiss/ucomisd), which raise the
// "unordered" condition when either operand is NaN. NaN is dispatched explicitly
// to whichever target the condition's NaN semantics require, then the ordinary
// condition jump to `true_label` is emitted; the false path is the fall-through.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1535
// Emits the compare instruction for `condition`, setting EFLAGS for a subsequent
// conditional jump or set. Integral types use cmp/test helpers; float/double use
// ucomiss/ucomisd against a register, an embedded literal, or a stack slot.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit (and reference) comparison.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      // 64-bit comparison.
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against a float literal placed in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against a double literal placed in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1588
1589template<class LabelType>
1590void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1591 LabelType* true_target_in,
1592 LabelType* false_target_in) {
1593 // Generated branching requires both targets to be explicit. If either of the
1594 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1595 LabelType fallthrough_target;
1596 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1597 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1598
1599 // Generate the comparison to set the CC.
1600 GenerateCompareTest(condition);
1601
1602 // Now generate the correct jump(s).
1603 Primitive::Type type = condition->InputAt(0)->GetType();
1604 switch (type) {
1605 case Primitive::kPrimLong: {
1606 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1607 break;
1608 }
1609 case Primitive::kPrimFloat: {
1610 GenerateFPJumps(condition, true_target, false_target);
1611 break;
1612 }
1613 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001614 GenerateFPJumps(condition, true_target, false_target);
1615 break;
1616 }
1617 default:
1618 LOG(FATAL) << "Unexpected condition type " << type;
1619 }
1620
David Brazdil0debae72015-11-12 18:37:00 +00001621 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001622 __ jmp(false_target);
1623 }
David Brazdil0debae72015-11-12 18:37:00 +00001624
1625 if (fallthrough_target.IsLinked()) {
1626 __ Bind(&fallthrough_target);
1627 }
Mark Mendellc4701932015-04-10 13:18:51 -04001628}
1629
David Brazdil0debae72015-11-12 18:37:00 +00001630static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1631 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1632 // are set only strictly before `branch`. We can't use the eflags on long
1633 // conditions if they are materialized due to the complex branching.
1634 return cond->IsCondition() &&
1635 cond->GetNext() == branch &&
1636 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1637}
1638
// Emits the branch(es) for `instruction` based on its condition input at
// `condition_input_index`. Either target may be nullptr, meaning that path is
// the fall-through. Handles constant conditions, materialized booleans, reuse
// of already-set EFLAGS, and non-materialized conditions.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The flags from the condition are still live; branch on them directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1722
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001723void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001724 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1725 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001726 locations->SetInAt(0, Location::Any());
1727 }
1728}
1729
1730void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001731 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1732 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1733 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1734 nullptr : codegen_->GetLabelOf(true_successor);
1735 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1736 nullptr : codegen_->GetLabelOf(false_successor);
1737 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001738}
1739
1740void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1741 LocationSummary* locations = new (GetGraph()->GetArena())
1742 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001743 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001744 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001745 locations->SetInAt(0, Location::Any());
1746 }
1747}
1748
1749void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001750 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001751 GenerateTestAndBranch<Label>(deoptimize,
1752 /* condition_input_index */ 0,
1753 slow_path->GetEntryLabel(),
1754 /* false_target */ nullptr);
1755}
1756
Mingyao Yang063fc772016-08-02 11:02:54 -07001757void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1758 LocationSummary* locations = new (GetGraph()->GetArena())
1759 LocationSummary(flag, LocationSummary::kNoCall);
1760 locations->SetOut(Location::RequiresRegister());
1761}
1762
1763void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1764 __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
1765 Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1766}
1767
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001768static bool SelectCanUseCMOV(HSelect* select) {
1769 // There are no conditional move instructions for XMMs.
1770 if (Primitive::IsFloatingPointType(select->GetType())) {
1771 return false;
1772 }
1773
1774 // A FP condition doesn't generate the single CC that we need.
1775 HInstruction* condition = select->GetCondition();
1776 if (condition->IsCondition() &&
1777 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1778 return false;
1779 }
1780
1781 // We can generate a CMOV for this Select.
1782 return true;
1783}
1784
David Brazdil74eb1b22015-12-14 11:44:01 +00001785void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1786 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1787 if (Primitive::IsFloatingPointType(select->GetType())) {
1788 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001789 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001790 } else {
1791 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001792 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001793 if (select->InputAt(1)->IsConstant()) {
1794 locations->SetInAt(1, Location::RequiresRegister());
1795 } else {
1796 locations->SetInAt(1, Location::Any());
1797 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001798 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001799 locations->SetInAt(1, Location::Any());
1800 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001801 }
1802 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1803 locations->SetInAt(2, Location::RequiresRegister());
1804 }
1805 locations->SetOut(Location::SameAsFirstInput());
1806}
1807
// Generates code for HSelect: either a CMOV (integer-only case) or an
// explicit test-and-branch around a move. The output register starts out
// holding the "false" value (SameAsFirstInput) and is conditionally
// overwritten with the "true" value.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    // Default condition: a boolean/materialized value tested against zero is
    // "true" when non-zero, i.e. kNotEqual after a testl.
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: generate the compare/test directly and
        // use its condition code.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // CMOV can take a memory source operand (stack slot).
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the true value when the condition is
    // false. The output already holds the false value.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1864
// HNativeDebugInfo has no operands; the summary only marks its presence.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

// Intentionally empty: no code is emitted for the instruction itself.
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

// Emits a single x86 NOP instruction.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1876
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001877void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001878 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001879 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001880 // Handle the long/FP comparisons made in instruction simplification.
1881 switch (cond->InputAt(0)->GetType()) {
1882 case Primitive::kPrimLong:
1883 locations->SetInAt(0, Location::RequiresRegister());
1884 locations->SetInAt(1, Location::Any());
1885 break;
1886 case Primitive::kPrimFloat:
1887 case Primitive::kPrimDouble:
1888 locations->SetInAt(0, Location::RequiresFpuRegister());
1889 locations->SetInAt(1, Location::Any());
1890 break;
1891 default:
1892 locations->SetInAt(0, Location::RequiresRegister());
1893 locations->SetInAt(1, Location::Any());
1894 break;
1895 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001896 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001897 locations->SetOut(Location::RequiresRegister());
1898 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001899}
1900
// Materializes a comparison into a 0/1 value in the output register.
// Integer and long comparisons use setcc directly; FP comparisons go through
// GenerateFPJumps and then convert the jump targets into 0/1.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  // Nothing to do: the condition is generated inline at its use site.
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      // FP compare via ucomiss; rhs may be a constant (loaded from the
      // literal pool), a stack slot, or an XMM register.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      // Same as the float case, using the double-precision ucomisd.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1970
// All concrete comparison visitors — both the locations-builder and the
// code-generator halves, for the signed (Equal..GreaterThanOrEqual) and
// unsigned (Below..AboveOrEqual) variants — share a single implementation:
// HandleCondition.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2050
// Builds locations for HCompare (three-way compare producing -1/0/1).
// Integral operands live in a GP register, FP operands in an XMM register;
// the second operand may be anywhere. The int result always needs a GP
// register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output may share a register with the inputs.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2077
// Generates code for HCompare: compares the two inputs and produces
// -1 (less), 0 (equal) or 1 (greater) in the output register. For FP inputs
// an unordered result is resolved toward +1 or -1 according to the compare's
// gt-bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used to branch to the "less" label after the compare.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: gt-bias resolves to greater, otherwise less.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: gt-bias resolves to greater, otherwise less.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Equal => 0, less => -1, otherwise fall through to greater => 1.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2146
// Constant visitors: each constant's location summary pins its output to a
// ConstantLocation, and no code is emitted here — the constant is
// materialized at each use site instead.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2197
// HMemoryBarrier has no operands; no location summary is needed.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

// Emits the fence matching the barrier kind carried by the instruction.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

// A void return has no operands; no location summary is needed.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2213
// Builds locations for HReturn: the returned value must already sit in the
// ABI return register — RAX for integral/reference values, XMM0 for FP.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2237
// Generates code for HReturn. The register allocator has already placed the
// value in the return register (checked below in debug builds), so only the
// frame exit needs to be emitted.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    // Sanity-check that the value is in the ABI-mandated return register.
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2263
// Returns the location in which a callee of the given return type delivers
// its result: RAX for integral/reference types, XMM0 for FP, no location for
// void. The switch is exhaustive over Primitive::Type, hence UNREACHABLE().
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}

// Returns where the callee expects its ArtMethod* (the implicit first argument).
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2289
// Assigns the location of the next outgoing argument of the given type,
// advancing the GP-register, FP-register and stack-slot cursors. Arguments
// beyond the available registers go on the stack; longs/doubles consume two
// stack slots.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Register pool exhausted: spill to the stack slot just consumed.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      // A long occupies two dex argument slots regardless of placement.
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      // A double occupies two dex argument slots regardless of placement.
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2345
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.  Hence the standard invoke location summary suffices.
  HandleInvoke(invoke);
}
2352
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Delegate to the shared helper, which emits the call to the
  // unresolved-invoke runtime trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2356
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002357void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002358 // Explicit clinit checks triggered by static invokes must have been pruned by
2359 // art::PrepareForRegisterAllocation.
2360 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002361
Mark Mendellfb8d2792015-03-31 22:16:59 -04002362 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002363 if (intrinsic.TryDispatch(invoke)) {
2364 return;
2365 }
2366
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002367 HandleInvoke(invoke);
2368}
2369
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002370static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2371 if (invoke->GetLocations()->Intrinsified()) {
2372 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2373 intrinsic.Dispatch(invoke);
2374 return true;
2375 }
2376 return false;
2377}
2378
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002379void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002380 // Explicit clinit checks triggered by static invokes must have been pruned by
2381 // art::PrepareForRegisterAllocation.
2382 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002383
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002384 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2385 return;
2386 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002387
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002388 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002389 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002390 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002391 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002392}
2393
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  // Build the standard dex-calling-convention location summary for this
  // invoke, using the x86-64 argument-placement visitor.
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2398
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002399void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002400 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002401 if (intrinsic.TryDispatch(invoke)) {
2402 return;
2403 }
2404
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002405 HandleInvoke(invoke);
2406}
2407
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002408void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002409 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2410 return;
2411 }
2412
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002413 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002414 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002415 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002416}
2417
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: interface dispatch passes extra data in RAX
  // (see the codegen visitor below), so reserve it as a second temporary.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2423
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument (the invoked method's dex index).  It is safe to
  // do so here, as RAX won't be modified thereafter, before the `call`
  // instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Byte offset of this interface method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2469
Roland Levillain88cb1752014-10-20 16:36:47 +01002470void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2471 LocationSummary* locations =
2472 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2473 switch (neg->GetResultType()) {
2474 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002475 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002476 locations->SetInAt(0, Location::RequiresRegister());
2477 locations->SetOut(Location::SameAsFirstInput());
2478 break;
2479
Roland Levillain88cb1752014-10-20 16:36:47 +01002480 case Primitive::kPrimFloat:
2481 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002482 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002483 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002484 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002485 break;
2486
2487 default:
2488 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2489 }
2490}
2491
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));  // Locations builder requested in-place negation.
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));  // Locations builder requested in-place negation.
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2535
Roland Levillaindff1f282014-11-05 14:15:05 +00002536void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2537 LocationSummary* locations =
2538 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2539 Primitive::Type result_type = conversion->GetResultType();
2540 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002541 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002542
David Brazdilb2bd1c52015-03-25 11:17:37 +00002543 // The Java language does not allow treating boolean as an integral type but
2544 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002545
Roland Levillaindff1f282014-11-05 14:15:05 +00002546 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002547 case Primitive::kPrimByte:
2548 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002549 case Primitive::kPrimLong:
2550 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002551 case Primitive::kPrimBoolean:
2552 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002553 case Primitive::kPrimShort:
2554 case Primitive::kPrimInt:
2555 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002556 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002557 locations->SetInAt(0, Location::Any());
2558 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2559 break;
2560
2561 default:
2562 LOG(FATAL) << "Unexpected type conversion from " << input_type
2563 << " to " << result_type;
2564 }
2565 break;
2566
Roland Levillain01a8d712014-11-14 16:27:39 +00002567 case Primitive::kPrimShort:
2568 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002569 case Primitive::kPrimLong:
2570 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002571 case Primitive::kPrimBoolean:
2572 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002573 case Primitive::kPrimByte:
2574 case Primitive::kPrimInt:
2575 case Primitive::kPrimChar:
2576 // Processing a Dex `int-to-short' instruction.
2577 locations->SetInAt(0, Location::Any());
2578 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2579 break;
2580
2581 default:
2582 LOG(FATAL) << "Unexpected type conversion from " << input_type
2583 << " to " << result_type;
2584 }
2585 break;
2586
Roland Levillain946e1432014-11-11 17:35:19 +00002587 case Primitive::kPrimInt:
2588 switch (input_type) {
2589 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002590 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002591 locations->SetInAt(0, Location::Any());
2592 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2593 break;
2594
2595 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002596 // Processing a Dex `float-to-int' instruction.
2597 locations->SetInAt(0, Location::RequiresFpuRegister());
2598 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002599 break;
2600
Roland Levillain946e1432014-11-11 17:35:19 +00002601 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002602 // Processing a Dex `double-to-int' instruction.
2603 locations->SetInAt(0, Location::RequiresFpuRegister());
2604 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002605 break;
2606
2607 default:
2608 LOG(FATAL) << "Unexpected type conversion from " << input_type
2609 << " to " << result_type;
2610 }
2611 break;
2612
Roland Levillaindff1f282014-11-05 14:15:05 +00002613 case Primitive::kPrimLong:
2614 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002615 case Primitive::kPrimBoolean:
2616 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002617 case Primitive::kPrimByte:
2618 case Primitive::kPrimShort:
2619 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002620 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002621 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002622 // TODO: We would benefit from a (to-be-implemented)
2623 // Location::RegisterOrStackSlot requirement for this input.
2624 locations->SetInAt(0, Location::RequiresRegister());
2625 locations->SetOut(Location::RequiresRegister());
2626 break;
2627
2628 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002629 // Processing a Dex `float-to-long' instruction.
2630 locations->SetInAt(0, Location::RequiresFpuRegister());
2631 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002632 break;
2633
Roland Levillaindff1f282014-11-05 14:15:05 +00002634 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002635 // Processing a Dex `double-to-long' instruction.
2636 locations->SetInAt(0, Location::RequiresFpuRegister());
2637 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002638 break;
2639
2640 default:
2641 LOG(FATAL) << "Unexpected type conversion from " << input_type
2642 << " to " << result_type;
2643 }
2644 break;
2645
Roland Levillain981e4542014-11-14 11:47:14 +00002646 case Primitive::kPrimChar:
2647 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002648 case Primitive::kPrimLong:
2649 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002650 case Primitive::kPrimBoolean:
2651 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002652 case Primitive::kPrimByte:
2653 case Primitive::kPrimShort:
2654 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002655 // Processing a Dex `int-to-char' instruction.
2656 locations->SetInAt(0, Location::Any());
2657 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2658 break;
2659
2660 default:
2661 LOG(FATAL) << "Unexpected type conversion from " << input_type
2662 << " to " << result_type;
2663 }
2664 break;
2665
Roland Levillaindff1f282014-11-05 14:15:05 +00002666 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002667 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002668 case Primitive::kPrimBoolean:
2669 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002670 case Primitive::kPrimByte:
2671 case Primitive::kPrimShort:
2672 case Primitive::kPrimInt:
2673 case Primitive::kPrimChar:
2674 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002675 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002676 locations->SetOut(Location::RequiresFpuRegister());
2677 break;
2678
2679 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002680 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002681 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002682 locations->SetOut(Location::RequiresFpuRegister());
2683 break;
2684
Roland Levillaincff13742014-11-17 14:32:17 +00002685 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002686 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002687 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002688 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002689 break;
2690
2691 default:
2692 LOG(FATAL) << "Unexpected type conversion from " << input_type
2693 << " to " << result_type;
2694 };
2695 break;
2696
Roland Levillaindff1f282014-11-05 14:15:05 +00002697 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002698 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002699 case Primitive::kPrimBoolean:
2700 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002701 case Primitive::kPrimByte:
2702 case Primitive::kPrimShort:
2703 case Primitive::kPrimInt:
2704 case Primitive::kPrimChar:
2705 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002706 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002707 locations->SetOut(Location::RequiresFpuRegister());
2708 break;
2709
2710 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002711 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002712 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002713 locations->SetOut(Location::RequiresFpuRegister());
2714 break;
2715
Roland Levillaincff13742014-11-17 14:32:17 +00002716 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002717 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002718 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002719 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002720 break;
2721
2722 default:
2723 LOG(FATAL) << "Unexpected type conversion from " << input_type
2724 << " to " << result_type;
2725 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002726 break;
2727
2728 default:
2729 LOG(FATAL) << "Unexpected type conversion from " << input_type
2730 << " to " << result_type;
2731 }
2732}
2733
2734void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2735 LocationSummary* locations = conversion->GetLocations();
2736 Location out = locations->Out();
2737 Location in = locations->InAt(0);
2738 Primitive::Type result_type = conversion->GetResultType();
2739 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002740 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002741 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002742 case Primitive::kPrimByte:
2743 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002744 case Primitive::kPrimLong:
2745 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002746 case Primitive::kPrimBoolean:
2747 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002748 case Primitive::kPrimShort:
2749 case Primitive::kPrimInt:
2750 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002751 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002752 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002753 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002754 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002755 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002756 Address(CpuRegister(RSP), in.GetStackIndex()));
2757 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002758 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002759 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002760 }
2761 break;
2762
2763 default:
2764 LOG(FATAL) << "Unexpected type conversion from " << input_type
2765 << " to " << result_type;
2766 }
2767 break;
2768
Roland Levillain01a8d712014-11-14 16:27:39 +00002769 case Primitive::kPrimShort:
2770 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002771 case Primitive::kPrimLong:
2772 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002773 case Primitive::kPrimBoolean:
2774 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002775 case Primitive::kPrimByte:
2776 case Primitive::kPrimInt:
2777 case Primitive::kPrimChar:
2778 // Processing a Dex `int-to-short' instruction.
2779 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002780 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002781 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002782 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002783 Address(CpuRegister(RSP), in.GetStackIndex()));
2784 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002785 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002786 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002787 }
2788 break;
2789
2790 default:
2791 LOG(FATAL) << "Unexpected type conversion from " << input_type
2792 << " to " << result_type;
2793 }
2794 break;
2795
Roland Levillain946e1432014-11-11 17:35:19 +00002796 case Primitive::kPrimInt:
2797 switch (input_type) {
2798 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002799 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002800 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002801 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002802 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002803 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002804 Address(CpuRegister(RSP), in.GetStackIndex()));
2805 } else {
2806 DCHECK(in.IsConstant());
2807 DCHECK(in.GetConstant()->IsLongConstant());
2808 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002809 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002810 }
2811 break;
2812
Roland Levillain3f8f9362014-12-02 17:45:01 +00002813 case Primitive::kPrimFloat: {
2814 // Processing a Dex `float-to-int' instruction.
2815 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2816 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002817 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002818
2819 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002820 // if input >= (float)INT_MAX goto done
2821 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002822 __ j(kAboveEqual, &done);
2823 // if input == NaN goto nan
2824 __ j(kUnordered, &nan);
2825 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002826 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002827 __ jmp(&done);
2828 __ Bind(&nan);
2829 // output = 0
2830 __ xorl(output, output);
2831 __ Bind(&done);
2832 break;
2833 }
2834
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002835 case Primitive::kPrimDouble: {
2836 // Processing a Dex `double-to-int' instruction.
2837 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2838 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002839 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002840
2841 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002842 // if input >= (double)INT_MAX goto done
2843 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002844 __ j(kAboveEqual, &done);
2845 // if input == NaN goto nan
2846 __ j(kUnordered, &nan);
2847 // output = double-to-int-truncate(input)
2848 __ cvttsd2si(output, input);
2849 __ jmp(&done);
2850 __ Bind(&nan);
2851 // output = 0
2852 __ xorl(output, output);
2853 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002854 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002855 }
Roland Levillain946e1432014-11-11 17:35:19 +00002856
2857 default:
2858 LOG(FATAL) << "Unexpected type conversion from " << input_type
2859 << " to " << result_type;
2860 }
2861 break;
2862
Roland Levillaindff1f282014-11-05 14:15:05 +00002863 case Primitive::kPrimLong:
2864 switch (input_type) {
2865 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002866 case Primitive::kPrimBoolean:
2867 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002868 case Primitive::kPrimByte:
2869 case Primitive::kPrimShort:
2870 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002871 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002872 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002873 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002874 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002875 break;
2876
Roland Levillain624279f2014-12-04 11:54:28 +00002877 case Primitive::kPrimFloat: {
2878 // Processing a Dex `float-to-long' instruction.
2879 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2880 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002881 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002882
Mark Mendell92e83bf2015-05-07 11:25:03 -04002883 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002884 // if input >= (float)LONG_MAX goto done
2885 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002886 __ j(kAboveEqual, &done);
2887 // if input == NaN goto nan
2888 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002889 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002890 __ cvttss2si(output, input, true);
2891 __ jmp(&done);
2892 __ Bind(&nan);
2893 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002894 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002895 __ Bind(&done);
2896 break;
2897 }
2898
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002899 case Primitive::kPrimDouble: {
2900 // Processing a Dex `double-to-long' instruction.
2901 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2902 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002903 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002904
Mark Mendell92e83bf2015-05-07 11:25:03 -04002905 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002906 // if input >= (double)LONG_MAX goto done
2907 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002908 __ j(kAboveEqual, &done);
2909 // if input == NaN goto nan
2910 __ j(kUnordered, &nan);
2911 // output = double-to-long-truncate(input)
2912 __ cvttsd2si(output, input, true);
2913 __ jmp(&done);
2914 __ Bind(&nan);
2915 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002916 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002917 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002918 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002919 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002920
2921 default:
2922 LOG(FATAL) << "Unexpected type conversion from " << input_type
2923 << " to " << result_type;
2924 }
2925 break;
2926
Roland Levillain981e4542014-11-14 11:47:14 +00002927 case Primitive::kPrimChar:
2928 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002929 case Primitive::kPrimLong:
2930 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002931 case Primitive::kPrimBoolean:
2932 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002933 case Primitive::kPrimByte:
2934 case Primitive::kPrimShort:
2935 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002936 // Processing a Dex `int-to-char' instruction.
2937 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002938 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002939 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002940 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002941 Address(CpuRegister(RSP), in.GetStackIndex()));
2942 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002943 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002944 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002945 }
2946 break;
2947
2948 default:
2949 LOG(FATAL) << "Unexpected type conversion from " << input_type
2950 << " to " << result_type;
2951 }
2952 break;
2953
Roland Levillaindff1f282014-11-05 14:15:05 +00002954 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002955 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002956 case Primitive::kPrimBoolean:
2957 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002958 case Primitive::kPrimByte:
2959 case Primitive::kPrimShort:
2960 case Primitive::kPrimInt:
2961 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002962 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002963 if (in.IsRegister()) {
2964 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2965 } else if (in.IsConstant()) {
2966 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2967 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002968 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002969 } else {
2970 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2971 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2972 }
Roland Levillaincff13742014-11-17 14:32:17 +00002973 break;
2974
2975 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002976 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002977 if (in.IsRegister()) {
2978 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2979 } else if (in.IsConstant()) {
2980 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2981 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002982 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002983 } else {
2984 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2985 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2986 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002987 break;
2988
Roland Levillaincff13742014-11-17 14:32:17 +00002989 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002990 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002991 if (in.IsFpuRegister()) {
2992 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2993 } else if (in.IsConstant()) {
2994 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2995 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002996 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002997 } else {
2998 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2999 Address(CpuRegister(RSP), in.GetStackIndex()));
3000 }
Roland Levillaincff13742014-11-17 14:32:17 +00003001 break;
3002
3003 default:
3004 LOG(FATAL) << "Unexpected type conversion from " << input_type
3005 << " to " << result_type;
3006 };
3007 break;
3008
Roland Levillaindff1f282014-11-05 14:15:05 +00003009 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00003010 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00003011 case Primitive::kPrimBoolean:
3012 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00003013 case Primitive::kPrimByte:
3014 case Primitive::kPrimShort:
3015 case Primitive::kPrimInt:
3016 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00003017 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003018 if (in.IsRegister()) {
3019 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3020 } else if (in.IsConstant()) {
3021 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3022 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003023 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003024 } else {
3025 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3026 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3027 }
Roland Levillaincff13742014-11-17 14:32:17 +00003028 break;
3029
3030 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003031 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003032 if (in.IsRegister()) {
3033 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3034 } else if (in.IsConstant()) {
3035 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3036 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003037 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003038 } else {
3039 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3040 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3041 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003042 break;
3043
Roland Levillaincff13742014-11-17 14:32:17 +00003044 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003045 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003046 if (in.IsFpuRegister()) {
3047 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3048 } else if (in.IsConstant()) {
3049 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3050 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003051 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003052 } else {
3053 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3054 Address(CpuRegister(RSP), in.GetStackIndex()));
3055 }
Roland Levillaincff13742014-11-17 14:32:17 +00003056 break;
3057
3058 default:
3059 LOG(FATAL) << "Unexpected type conversion from " << input_type
3060 << " to " << result_type;
3061 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003062 break;
3063
3064 default:
3065 LOG(FATAL) << "Unexpected type conversion from " << input_type
3066 << " to " << result_type;
3067 }
3068}
3069
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003070void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003071 LocationSummary* locations =
3072 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003073 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003074 case Primitive::kPrimInt: {
3075 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003076 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3077 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003078 break;
3079 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003080
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003081 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003082 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003083 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003084 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003085 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003086 break;
3087 }
3088
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003089 case Primitive::kPrimDouble:
3090 case Primitive::kPrimFloat: {
3091 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003092 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003093 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003094 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003095 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003096
3097 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003098 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003099 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003100}
3101
// Emits the x86-64 code for an HAdd. Integer adds may use a three-operand
// leal/leaq so that `out` need not alias either input; floating-point adds
// are two-operand and require out == first (enforced by the locations
// builder via SameAsFirstInput).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // Prefer a plain addl when the output aliases one of the inputs;
        // otherwise use leal as a non-destructive three-operand add.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        // Constant right-hand side: addl imm in place, or leal with a
        // displacement when the output is a different register.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: addl is destructive, so out must equal first.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        // Same addq/leaq selection as the int case, on 64-bit registers.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        // The locations builder only allowed Int32 constants here
        // (RegisterOrInt32Constant), so truncation must be lossless.
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // addss reads its second operand from a register, from the constant
      // area, or directly from the stack slot.
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Mirror of the float case using the double-precision addsd.
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3193
3194void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003195 LocationSummary* locations =
3196 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003197 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003198 case Primitive::kPrimInt: {
3199 locations->SetInAt(0, Location::RequiresRegister());
3200 locations->SetInAt(1, Location::Any());
3201 locations->SetOut(Location::SameAsFirstInput());
3202 break;
3203 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003204 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003205 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003206 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003207 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003208 break;
3209 }
Calin Juravle11351682014-10-23 15:38:15 +01003210 case Primitive::kPrimFloat:
3211 case Primitive::kPrimDouble: {
3212 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003213 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003214 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003215 break;
Calin Juravle11351682014-10-23 15:38:15 +01003216 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003217 default:
Calin Juravle11351682014-10-23 15:38:15 +01003218 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003219 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003220}
3221
// Emits the x86-64 code for an HSub. All forms are two-operand and
// destructive: the result is produced in place in the first operand, which
// the locations builder pinned to the output (checked below).
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // subl with a register, immediate, or stack-slot right-hand side.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        // The locations builder only allowed Int32 constants here.
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // subss reads the subtrahend from a register, the constant area, or
      // its stack slot.
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Mirror of the float case using the double-precision subsd.
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3284
Calin Juravle34bacdf2014-10-07 20:23:36 +01003285void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3286 LocationSummary* locations =
3287 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3288 switch (mul->GetResultType()) {
3289 case Primitive::kPrimInt: {
3290 locations->SetInAt(0, Location::RequiresRegister());
3291 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003292 if (mul->InputAt(1)->IsIntConstant()) {
3293 // Can use 3 operand multiply.
3294 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3295 } else {
3296 locations->SetOut(Location::SameAsFirstInput());
3297 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003298 break;
3299 }
3300 case Primitive::kPrimLong: {
3301 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003302 locations->SetInAt(1, Location::Any());
3303 if (mul->InputAt(1)->IsLongConstant() &&
3304 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003305 // Can use 3 operand multiply.
3306 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3307 } else {
3308 locations->SetOut(Location::SameAsFirstInput());
3309 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003310 break;
3311 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003312 case Primitive::kPrimFloat:
3313 case Primitive::kPrimDouble: {
3314 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003315 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003316 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003317 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003318 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003319
3320 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003321 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003322 }
3323}
3324
// Emits the x86-64 code for an HMul. For integer multiplies by a constant
// the three-operand imul form is used when possible; all other forms are
// destructive two-operand multiplies where out must alias the first input.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand imull: out = first * imm.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand imulq only takes a 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // mulss reads its second operand from a register, the constant area,
      // or directly from the stack slot.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Mirror of the float case using the double-precision mulsd.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3408
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003409void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3410 uint32_t stack_adjustment, bool is_float) {
3411 if (source.IsStackSlot()) {
3412 DCHECK(is_float);
3413 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3414 } else if (source.IsDoubleStackSlot()) {
3415 DCHECK(!is_float);
3416 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3417 } else {
3418 // Write the value to the temporary location on the stack and load to FP stack.
3419 if (is_float) {
3420 Location stack_temp = Location::StackSlot(temp_offset);
3421 codegen_->Move(stack_temp, source);
3422 __ flds(Address(CpuRegister(RSP), temp_offset));
3423 } else {
3424 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3425 codegen_->Move(stack_temp, source);
3426 __ fldl(Address(CpuRegister(RSP), temp_offset));
3427 }
3428 }
3429}
3430
// Emits a floating-point remainder (HRem on float/double) using the x87
// fprem instruction, since SSE has no remainder operation. The operands are
// moved onto the x87 FP stack via a temporary stack area, fprem is run to
// completion, and the result is transferred back into an XMM register.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  // fprem performs only partial argument reduction per invocation and must
  // be repeated until it reports completion.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3483
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003484void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3485 DCHECK(instruction->IsDiv() || instruction->IsRem());
3486
3487 LocationSummary* locations = instruction->GetLocations();
3488 Location second = locations->InAt(1);
3489 DCHECK(second.IsConstant());
3490
3491 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3492 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003493 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003494
3495 DCHECK(imm == 1 || imm == -1);
3496
3497 switch (instruction->GetResultType()) {
3498 case Primitive::kPrimInt: {
3499 if (instruction->IsRem()) {
3500 __ xorl(output_register, output_register);
3501 } else {
3502 __ movl(output_register, input_register);
3503 if (imm == -1) {
3504 __ negl(output_register);
3505 }
3506 }
3507 break;
3508 }
3509
3510 case Primitive::kPrimLong: {
3511 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003512 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003513 } else {
3514 __ movq(output_register, input_register);
3515 if (imm == -1) {
3516 __ negq(output_register);
3517 }
3518 }
3519 break;
3520 }
3521
3522 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003523 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003524 }
3525}
3526
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003527void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003528 LocationSummary* locations = instruction->GetLocations();
3529 Location second = locations->InAt(1);
3530
3531 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3532 CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
3533
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003534 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003535 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3536 uint64_t abs_imm = AbsOrMin(imm);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003537
3538 CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
3539
3540 if (instruction->GetResultType() == Primitive::kPrimInt) {
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003541 __ leal(tmp, Address(numerator, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003542 __ testl(numerator, numerator);
3543 __ cmov(kGreaterEqual, tmp, numerator);
3544 int shift = CTZ(imm);
3545 __ sarl(tmp, Immediate(shift));
3546
3547 if (imm < 0) {
3548 __ negl(tmp);
3549 }
3550
3551 __ movl(output_register, tmp);
3552 } else {
3553 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3554 CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
3555
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003556 codegen_->Load64BitValue(rdx, abs_imm - 1);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003557 __ addq(rdx, numerator);
3558 __ testq(numerator, numerator);
3559 __ cmov(kGreaterEqual, rdx, numerator);
3560 int shift = CTZ(imm);
3561 __ sarq(rdx, Immediate(shift));
3562
3563 if (imm < 0) {
3564 __ negq(rdx);
3565 }
3566
3567 __ movq(output_register, rdx);
3568 }
3569}
3570
// Generates code for an integral HDiv/HRem by an arbitrary constant (not 0,
// +/-1, or a power of two for div), using the magic-number multiplication
// technique (Granlund-Montgomery, "Hacker's Delight" ch. 10): the quotient is
// the high half of numerator * magic, optionally corrected by +/- numerator,
// arithmetically shifted, then adjusted by the sign bit to round toward zero.
// For rem, the quotient is multiplied back and subtracted from the numerator.
// RAX/RDX are clobbered by imul, so the numerator is saved in a temp first.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // For div, temp 0 is RDX (see the locations builder) so the numerator copy
  // lives in temp 1; for rem the output is RDX, so temp 0 holds the copy.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator: the widening imul below clobbers EAX and EDX.
    __ movl(numerator, eax);

    __ movl(eax, Immediate(magic));
    // EDX:EAX = magic * numerator; EDX holds the high half.
    __ imull(numerator);

    // Correction terms required when imm and magic have opposite signs.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // Add the sign bit to the quotient so negative results round toward zero.
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, ending up in EDX (the output).
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0 (round toward zero).
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // imulq only takes a 32-bit immediate; otherwise load imm from the
      // constant area.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3681
Calin Juravlebacfec32014-11-14 15:54:36 +00003682void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3683 DCHECK(instruction->IsDiv() || instruction->IsRem());
3684 Primitive::Type type = instruction->GetResultType();
3685 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3686
3687 bool is_div = instruction->IsDiv();
3688 LocationSummary* locations = instruction->GetLocations();
3689
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003690 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3691 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003692
Roland Levillain271ab9c2014-11-27 15:23:57 +00003693 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003694 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003695
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003696 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003697 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003698
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003699 if (imm == 0) {
3700 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3701 } else if (imm == 1 || imm == -1) {
3702 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003703 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003704 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003705 } else {
3706 DCHECK(imm <= -2 || imm >= 2);
3707 GenerateDivRemWithAnyConstant(instruction);
3708 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003709 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003710 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003711 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003712 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003713 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003714
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003715 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3716 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3717 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3718 // so it's safe to just use negl instead of more complex comparisons.
3719 if (type == Primitive::kPrimInt) {
3720 __ cmpl(second_reg, Immediate(-1));
3721 __ j(kEqual, slow_path->GetEntryLabel());
3722 // edx:eax <- sign-extended of eax
3723 __ cdq();
3724 // eax = quotient, edx = remainder
3725 __ idivl(second_reg);
3726 } else {
3727 __ cmpq(second_reg, Immediate(-1));
3728 __ j(kEqual, slow_path->GetEntryLabel());
3729 // rdx:rax <- sign-extended of rax
3730 __ cqo();
3731 // rax = quotient, rdx = remainder
3732 __ idivq(second_reg);
3733 }
3734 __ Bind(slow_path->GetExitLabel());
3735 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003736}
3737
Calin Juravle7c4954d2014-10-28 16:57:40 +00003738void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3739 LocationSummary* locations =
3740 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3741 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003742 case Primitive::kPrimInt:
3743 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003744 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003745 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003746 locations->SetOut(Location::SameAsFirstInput());
3747 // Intel uses edx:eax as the dividend.
3748 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003749 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3750 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3751 // output and request another temp.
3752 if (div->InputAt(1)->IsConstant()) {
3753 locations->AddTemp(Location::RequiresRegister());
3754 }
Calin Juravled0d48522014-11-04 16:40:20 +00003755 break;
3756 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003757
Calin Juravle7c4954d2014-10-28 16:57:40 +00003758 case Primitive::kPrimFloat:
3759 case Primitive::kPrimDouble: {
3760 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003761 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003762 locations->SetOut(Location::SameAsFirstInput());
3763 break;
3764 }
3765
3766 default:
3767 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3768 }
3769}
3770
// Emits code for HDiv: integral types go through the shared div/rem helper;
// float/double use divss/divsd with whatever form the second operand took
// (register, constant-area literal, or stack slot).
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-operand SSE form: the first input doubles as the output.
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3819
Calin Juravlebacfec32014-11-14 15:54:36 +00003820void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003821 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003822 LocationSummary* locations =
3823 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003824
3825 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003826 case Primitive::kPrimInt:
3827 case Primitive::kPrimLong: {
3828 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003829 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003830 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3831 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003832 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3833 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3834 // output and request another temp.
3835 if (rem->InputAt(1)->IsConstant()) {
3836 locations->AddTemp(Location::RequiresRegister());
3837 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003838 break;
3839 }
3840
3841 case Primitive::kPrimFloat:
3842 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003843 locations->SetInAt(0, Location::Any());
3844 locations->SetInAt(1, Location::Any());
3845 locations->SetOut(Location::RequiresFpuRegister());
3846 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003847 break;
3848 }
3849
3850 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003851 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003852 }
3853}
3854
3855void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3856 Primitive::Type type = rem->GetResultType();
3857 switch (type) {
3858 case Primitive::kPrimInt:
3859 case Primitive::kPrimLong: {
3860 GenerateDivRemIntegral(rem);
3861 break;
3862 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003863 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003864 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003865 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003866 break;
3867 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003868 default:
3869 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3870 }
3871}
3872
Calin Juravled0d48522014-11-04 16:40:20 +00003873void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003874 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003875 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00003876}
3877
// Emits the divide-by-zero check: branches to a throwing slow path when the
// divisor is zero. A zero-valued constant divisor becomes an unconditional
// jump; a non-zero constant emits nothing.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // All sub-int types are held in 32-bit registers/slots, so they share the
    // kPrimInt code.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero divisor: always throws.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3925
Calin Juravle9aec02f2014-11-18 23:06:35 +00003926void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3927 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3928
3929 LocationSummary* locations =
3930 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3931
3932 switch (op->GetResultType()) {
3933 case Primitive::kPrimInt:
3934 case Primitive::kPrimLong: {
3935 locations->SetInAt(0, Location::RequiresRegister());
3936 // The shift count needs to be in CL.
3937 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3938 locations->SetOut(Location::SameAsFirstInput());
3939 break;
3940 }
3941 default:
3942 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3943 }
3944}
3945
// Shared code generation for HShl/HShr/HUShr: picks the 32- or 64-bit form of
// shl/sar/shr, with the count either in CL or as an immediate masked to the
// architectural maximum shift distance (matching Java shift semantics).
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // The register allocator pinned the count to CL.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          // Shr is arithmetic (sign-propagating), UShr is logical.
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4003
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004004void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4005 LocationSummary* locations =
4006 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
4007
4008 switch (ror->GetResultType()) {
4009 case Primitive::kPrimInt:
4010 case Primitive::kPrimLong: {
4011 locations->SetInAt(0, Location::RequiresRegister());
4012 // The shift count needs to be in CL (unless it is a constant).
4013 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4014 locations->SetOut(Location::SameAsFirstInput());
4015 break;
4016 }
4017 default:
4018 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4019 UNREACHABLE();
4020 }
4021}
4022
4023void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
4024 LocationSummary* locations = ror->GetLocations();
4025 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
4026 Location second = locations->InAt(1);
4027
4028 switch (ror->GetResultType()) {
4029 case Primitive::kPrimInt:
4030 if (second.IsRegister()) {
4031 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4032 __ rorl(first_reg, second_reg);
4033 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004034 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004035 __ rorl(first_reg, imm);
4036 }
4037 break;
4038 case Primitive::kPrimLong:
4039 if (second.IsRegister()) {
4040 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4041 __ rorq(first_reg, second_reg);
4042 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004043 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004044 __ rorq(first_reg, imm);
4045 }
4046 break;
4047 default:
4048 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4049 UNREACHABLE();
4050 }
4051}
4052
Calin Juravle9aec02f2014-11-18 23:06:35 +00004053void LocationsBuilderX86_64::VisitShl(HShl* shl) {
4054 HandleShift(shl);
4055}
4056
// Shl shares the common shift code generation (shll/shlq).
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4060
// Shr uses the common shift constraints (count in CL or as a constant).
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4064
// Shr (arithmetic shift right) shares the common shift code generation.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4068
// UShr uses the common shift constraints (count in CL or as a constant).
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4072
// UShr (logical shift right) shares the common shift code generation.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4076
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004077void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004078 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004079 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004080 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004081 if (instruction->IsStringAlloc()) {
4082 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4083 } else {
4084 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4085 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
4086 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004087 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004088}
4089
// Emits the object-allocation runtime call. Strings are special-cased: they
// are created via the NewEmptyString entry point, loaded from the
// thread-local (GS-relative) entrypoint table and invoked indirectly.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4106
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004107void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
4108 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004109 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004110 InvokeRuntimeCallingConvention calling_convention;
4111 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004112 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004113 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004114 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004115}
4116
// Emits the array-allocation runtime call: loads the type index into the
// first argument register and invokes the instruction's allocation entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex().index_);
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
4128
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004129void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004130 LocationSummary* locations =
4131 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004132 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4133 if (location.IsStackSlot()) {
4134 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4135 } else if (location.IsDoubleStackSlot()) {
4136 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4137 }
4138 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004139}
4140
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004141void InstructionCodeGeneratorX86_64::VisitParameterValue(
4142 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004143 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004144}
4145
4146void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4147 LocationSummary* locations =
4148 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4149 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4150}
4151
// No code is emitted: the method register already holds the current method.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4156
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004157void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4158 LocationSummary* locations =
4159 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4160 locations->SetInAt(0, Location::RequiresRegister());
4161 locations->SetOut(Location::RequiresRegister());
4162}
4163
// Loads an ArtMethod* out of a class's dispatch tables. VTable entries are
// embedded in the class object (single load at a fixed offset); IMT entries
// require loading the IMT pointer first, then indexing into that table.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // First load the IMT pointer from the class, then the method from the IMT.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4181
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004182void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004183 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004184 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004185 locations->SetInAt(0, Location::RequiresRegister());
4186 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004187}
4188
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004189void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4190 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004191 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4192 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004193 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004194 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004195 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004196 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004197 break;
4198
4199 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004200 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004201 break;
4202
4203 default:
4204 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4205 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004206}
4207
David Brazdil66d126e2015-04-03 16:02:44 +01004208void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4209 LocationSummary* locations =
4210 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4211 locations->SetInAt(0, Location::RequiresRegister());
4212 locations->SetOut(Location::SameAsFirstInput());
4213}
4214
4215void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004216 LocationSummary* locations = bool_not->GetLocations();
4217 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4218 locations->Out().AsRegister<CpuRegister>().AsRegister());
4219 Location out = locations->Out();
4220 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4221}
4222
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004223void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004224 LocationSummary* locations =
4225 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004226 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004227 locations->SetInAt(i, Location::Any());
4228 }
4229 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004230}
4231
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are expected to have been resolved into moves before code
  // generation, so reaching this visitor indicates a compiler bug.
  LOG(FATAL) << "Unimplemented";
}
4235
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004236void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004237 /*
4238 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004239 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004240 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4241 */
4242 switch (kind) {
4243 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004244 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004245 break;
4246 }
4247 case MemBarrierKind::kAnyStore:
4248 case MemBarrierKind::kLoadAny:
4249 case MemBarrierKind::kStoreStore: {
4250 // nop
4251 break;
4252 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004253 case MemBarrierKind::kNTStoreStore:
4254 // Non-Temporal Store/Store needs an explicit fence.
4255 MemoryFence(/* non-temporal */ true);
4256 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004257 }
4258}
4259
4260void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4261 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4262
Roland Levillain0d5a2812015-11-13 10:07:31 +00004263 bool object_field_get_with_read_barrier =
4264 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004265 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004266 new (GetGraph()->GetArena()) LocationSummary(instruction,
4267 object_field_get_with_read_barrier ?
4268 LocationSummary::kCallOnSlowPath :
4269 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004270 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004271 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004272 }
Calin Juravle52c48962014-12-16 17:02:57 +00004273 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004274 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4275 locations->SetOut(Location::RequiresFpuRegister());
4276 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004277 // The output overlaps for an object field get when read barriers
4278 // are enabled: we do not want the move to overwrite the object's
4279 // location, as we need it to emit the read barrier.
4280 locations->SetOut(
4281 Location::RequiresRegister(),
4282 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004283 }
Calin Juravle52c48962014-12-16 17:02:57 +00004284}
4285
// Emits the load for an instance or static field get: a plain sized load for
// primitives, and (when enabled) read-barrier handling for references.
// Implicit null checks and volatile LoadAny barriers are recorded/emitted
// here as well; for references both are handled inside the kPrimNot case.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    // Sub-word types: zero-extend unsigned (boolean, char), sign-extend
    // signed (byte, short).
    case Primitive::kPrimBoolean: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4384
4385void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4386 const FieldInfo& field_info) {
4387 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4388
4389 LocationSummary* locations =
4390 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004391 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004392 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004393 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004394 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004395
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004396 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004397 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004398 if (is_volatile) {
4399 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4400 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4401 } else {
4402 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4403 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004404 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004405 if (is_volatile) {
4406 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4407 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4408 } else {
4409 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4410 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004411 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004412 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004413 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004414 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004415 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004416 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4417 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004418 locations->AddTemp(Location::RequiresRegister());
4419 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004420}
4421
// Emits the store for an instance or static field set. Handles volatile
// barrier placement (AnyStore before, AnyAny after), constant vs. register
// values, heap-reference poisoning, the GC card-marking write barrier, and
// implicit-null-check bookkeeping.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Order all prior accesses before a volatile store.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper below has already recorded the implicit null check
  // itself (MoveInt64ToAddress may emit more than one instruction).
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temp so the value register stays live.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object holding the freshly stored reference.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // Order a volatile store before all subsequent accesses.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4544
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field sets share the same location constraints.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4548
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Forwards the value's nullability so the shared helper can pass it on to
  // the card-marking write barrier.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4552
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field gets share the same location constraints.
  HandleFieldGet(instruction);
}
4556
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Code generation is shared with static field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004560
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Shares location constraints with instance field gets.
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004564
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Code generation is shared with instance field gets.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004568
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Shares location constraints with instance field sets.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004572
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Forwards the value's nullability so the shared helper can pass it on to
  // the card-marking write barrier.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4576
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Unresolved field accesses use the field-access calling convention; the
  // shared codegen helper builds the location summary from it.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4583
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Delegates to the shared unresolved-field-access generator with this
  // instruction's field type, field index and dex pc.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4593
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Same calling-convention-based location summary as the unresolved gets.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4600
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Delegates to the shared unresolved-field-access generator.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4610
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Same calling-convention-based location summary as the other unresolved
  // field accesses.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4617
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Delegates to the shared unresolved-field-access generator.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4627
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Same calling-convention-based location summary as the other unresolved
  // field accesses.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4634
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Delegates to the shared unresolved-field-access generator.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4644
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004645void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004646 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4647 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4648 ? Location::RequiresRegister()
4649 : Location::Any();
4650 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004651}
4652
Calin Juravle2ae48182016-03-16 14:05:09 +00004653void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4654 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004655 return;
4656 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004657 LocationSummary* locations = instruction->GetLocations();
4658 Location obj = locations->InAt(0);
4659
4660 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004661 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004662}
4663
Calin Juravle2ae48182016-03-16 14:05:09 +00004664void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004665 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004666 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004667
4668 LocationSummary* locations = instruction->GetLocations();
4669 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004670
4671 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004672 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004673 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004674 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004675 } else {
4676 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004677 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004678 __ jmp(slow_path->GetEntryLabel());
4679 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004680 }
4681 __ j(kEqual, slow_path->GetEntryLabel());
4682}
4683
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // Delegates to the codegen, which selects the implicit (faulting load) or
  // explicit (compare + slow path) form — see GenerateImplicitNullCheck and
  // GenerateExplicitNullCheck above.
  codegen_->GenerateNullCheck(instruction);
}
4687
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004688void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004689 bool object_array_get_with_read_barrier =
4690 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004691 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004692 new (GetGraph()->GetArena()) LocationSummary(instruction,
4693 object_array_get_with_read_barrier ?
4694 LocationSummary::kCallOnSlowPath :
4695 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004696 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004697 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004698 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004699 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004700 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004701 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4702 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4703 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004704 // The output overlaps for an object array get when read barriers
4705 // are enabled: we do not want the move to overwrite the array's
4706 // location, as we need it to emit the read barrier.
4707 locations->SetOut(
4708 Location::RequiresRegister(),
4709 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004710 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004711}
4712
4713void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4714 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004715 Location obj_loc = locations->InAt(0);
4716 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004717 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004718 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004719 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004720
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004721 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004722 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004723 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004724 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004725 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004726 break;
4727 }
4728
4729 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004730 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004731 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004732 break;
4733 }
4734
4735 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004736 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004737 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004738 break;
4739 }
4740
4741 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004742 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07004743 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
4744 // Branch cases into compressed and uncompressed for each index's type.
4745 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4746 NearLabel done, not_compressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004747 __ testl(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07004748 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004749 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
4750 "Expecting 0=compressed, 1=uncompressed");
4751 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07004752 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
4753 __ jmp(&done);
4754 __ Bind(&not_compressed);
4755 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4756 __ Bind(&done);
4757 } else {
4758 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4759 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004760 break;
4761 }
4762
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004763 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004764 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004765 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004766 break;
4767 }
4768
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004769 case Primitive::kPrimNot: {
4770 static_assert(
4771 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4772 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004773 // /* HeapReference<Object> */ out =
4774 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4775 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004776 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01004777 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004778 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004779 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004780 } else {
4781 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004782 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4783 codegen_->MaybeRecordImplicitNullCheck(instruction);
4784 // If read barriers are enabled, emit read barriers other than
4785 // Baker's using a slow path (and also unpoison the loaded
4786 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004787 if (index.IsConstant()) {
4788 uint32_t offset =
4789 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004790 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4791 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004792 codegen_->MaybeGenerateReadBarrierSlow(
4793 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4794 }
4795 }
4796 break;
4797 }
4798
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004799 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004800 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004801 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004802 break;
4803 }
4804
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004805 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004806 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004807 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004808 break;
4809 }
4810
4811 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004812 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004813 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004814 break;
4815 }
4816
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004817 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004818 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004819 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004820 }
Roland Levillain4d027112015-07-01 15:41:14 +01004821
4822 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004823 // Potential implicit null checks, in the case of reference
4824 // arrays, are handled in the previous switch statement.
4825 } else {
4826 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004827 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004828}
4829
4830void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004831 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004832
4833 bool needs_write_barrier =
4834 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004835 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004836
Nicolas Geoffray39468442014-09-02 15:17:15 +01004837 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004838 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004839 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004840 LocationSummary::kCallOnSlowPath :
4841 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004842
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004843 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004844 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4845 if (Primitive::IsFloatingPointType(value_type)) {
4846 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004847 } else {
4848 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4849 }
4850
4851 if (needs_write_barrier) {
4852 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004853 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004854 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004855 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004856}
4857
// Generates the store into an array element. For reference stores this also
// emits the (possibly slow-path) array type check, heap reference poisoning,
// and the GC card mark; for other types it is a plain sized store. The
// MaybeRecordImplicitNullCheck calls are placed directly after the first
// memory access through `array` so a fault there maps back to this
// instruction.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Field offsets used by the reference type check below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check, no poisoning, no write barrier.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // A null value always passes the type check; store it directly
          // and skip both the check and the card mark's value use.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          // A null super class here means temp is exactly
          // j.l.Object, so any reference is assignable; otherwise
          // defer to the runtime check on the slow path.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // The actual reference store, poisoned first if the heap uses
      // poisoned references.
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Write barrier: mark the GC card covering `array`.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may not fit in one store; MoveInt64ToAddress
        // handles the split (and records the null check itself).
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the raw bit pattern of the float constant as an integer.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the raw bit pattern of the double constant (possibly split
        // into two 32-bit stores by MoveInt64ToAddress).
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5060
5061void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005062 LocationSummary* locations =
5063 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005064 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005065 if (!instruction->IsEmittedAtUseSite()) {
5066 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5067 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005068}
5069
// Loads the array length (or String count) field into the output register.
void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
  // If the length is consumed directly from memory by its user (see
  // VisitBoundsCheck), there is nothing to emit here.
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  // The load itself doubles as the implicit null check on `obj`.
  __ movl(out, Address(obj, offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Strip the string-compression flag in case the "array" is a String: with
  // compression enabled the count field holds (length << 1) | flag, the flag
  // living in the least significant bit (see the testl against Immediate(1)
  // and the 0=compressed/1=uncompressed static_assert in VisitArrayGet).
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ shrl(out, Immediate(1));
  }
}
5086
5087void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005088 RegisterSet caller_saves = RegisterSet::Empty();
5089 InvokeRuntimeCallingConvention calling_convention;
5090 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5091 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5092 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005093 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005094 HInstruction* length = instruction->InputAt(1);
5095 if (!length->IsEmittedAtUseSite()) {
5096 locations->SetInAt(1, Location::RegisterOrConstant(length));
5097 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005098}
5099
// Generates the bounds check for an array/string access: compares the index
// against the length and branches to a throwing slow path on failure. The
// comparisons use unsigned condition codes (kAboveEqual / kBelowEqual) so a
// negative index, wrapping to a huge unsigned value, is caught as well.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically out of bounds: unconditionally enter the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // The compressed-string count field holds (length << 1) | flag, so
        // load it into TMP and shift the flag bit out before comparing.
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Length was the first comparison operand here, so the condition is
    // reversed: throw when length <= index (unsigned).
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5160
5161void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5162 CpuRegister card,
5163 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005164 CpuRegister value,
5165 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005166 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005167 if (value_can_be_null) {
5168 __ testl(value, value);
5169 __ j(kEqual, &is_null);
5170 }
Andreas Gampe542451c2016-07-26 09:02:02 -07005171 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005172 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005173 __ movq(temp, object);
5174 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005175 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005176 if (value_can_be_null) {
5177 __ Bind(&is_null);
5178 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005179}
5180
// HParallelMove is lowered by the parallel move resolver (see the code
// generator's VisitParallelMove), so presumably no LocationSummary is ever
// built for it; reaching this visitor indicates a compiler bug.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5184
// Lowering of a parallel move is delegated entirely to the resolver.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5188
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005189void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005190 LocationSummary* locations =
5191 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005192 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005193}
5194
5195void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005196 HBasicBlock* block = instruction->GetBlock();
5197 if (block->GetLoopInformation() != nullptr) {
5198 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5199 // The back edge will generate the suspend check.
5200 return;
5201 }
5202 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5203 // The goto will generate the suspend check.
5204 return;
5205 }
5206 GenerateSuspendCheck(instruction, nullptr);
5207}
5208
// Emits the suspend check: tests the current thread's flags and diverts to a
// runtime slow path when any flag is set. When `successor` is non-null it is
// a loop header that the fast path jumps to; when null, execution falls
// through after the check.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // The slow path is cached on the instruction so a later call for the same
  // check reuses it (the DCHECK below verifies a consistent successor).
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Compare the thread-local flags word (reached through the GS segment
  // register) against zero.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Standalone check: take the slow path if any flag is set, then resume
    // at its return label.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: jump to the loop header when no flag is set,
    // otherwise fall into the slow path (constructed with `successor`).
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5236
// The resolver emits its moves through the code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5240
5241void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005242 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005243 Location source = move->GetSource();
5244 Location destination = move->GetDestination();
5245
5246 if (source.IsRegister()) {
5247 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005248 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005249 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005250 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005251 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005252 } else {
5253 DCHECK(destination.IsDoubleStackSlot());
5254 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005255 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005256 }
5257 } else if (source.IsStackSlot()) {
5258 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005259 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005260 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005261 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005262 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005263 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005264 } else {
5265 DCHECK(destination.IsStackSlot());
5266 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5267 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5268 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005269 } else if (source.IsDoubleStackSlot()) {
5270 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005271 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005272 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005273 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005274 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5275 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005276 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005277 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005278 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5279 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5280 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005281 } else if (source.IsConstant()) {
5282 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005283 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5284 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005285 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005286 if (value == 0) {
5287 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5288 } else {
5289 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5290 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005291 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005292 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005293 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005294 }
5295 } else if (constant->IsLongConstant()) {
5296 int64_t value = constant->AsLongConstant()->GetValue();
5297 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005298 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005299 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005300 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005301 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005302 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005303 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005304 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005305 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005306 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005307 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005308 } else {
5309 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005310 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005311 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5312 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005313 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005314 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005315 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005316 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005317 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005318 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005319 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005320 } else {
5321 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005322 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005323 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005324 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005325 } else if (source.IsFpuRegister()) {
5326 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005327 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005328 } else if (destination.IsStackSlot()) {
5329 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005330 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005331 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005332 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005333 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005334 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005335 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005336 }
5337}
5338
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005339void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005340 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005341 __ movl(Address(CpuRegister(RSP), mem), reg);
5342 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005343}
5344
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005345void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005346 ScratchRegisterScope ensure_scratch(
5347 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5348
5349 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5350 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5351 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5352 Address(CpuRegister(RSP), mem2 + stack_offset));
5353 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5354 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5355 CpuRegister(ensure_scratch.GetRegister()));
5356}
5357
Mark Mendell8a1c7282015-06-29 15:41:28 -04005358void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5359 __ movq(CpuRegister(TMP), reg1);
5360 __ movq(reg1, reg2);
5361 __ movq(reg2, CpuRegister(TMP));
5362}
5363
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005364void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5365 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5366 __ movq(Address(CpuRegister(RSP), mem), reg);
5367 __ movq(reg, CpuRegister(TMP));
5368}
5369
// Swaps two 64-bit stack slots. Mirrors the 32-bit variant above: a second
// scratch register is acquired (possibly spilled), and offsets are rebased
// by one word when the spill pushed onto the stack.
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Account for the word pushed by the scratch spill, if any.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5382
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005383void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5384 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5385 __ movss(Address(CpuRegister(RSP), mem), reg);
5386 __ movd(reg, CpuRegister(TMP));
5387}
5388
// Swaps the 64-bit contents of XMM register `reg` with the double stack slot
// at RSP + `mem`, staging the old slot value in the core TMP register.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5394
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005395void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005396 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005397 Location source = move->GetSource();
5398 Location destination = move->GetDestination();
5399
5400 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005401 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005402 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005403 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005404 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005405 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005406 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005407 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5408 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005409 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005410 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005411 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005412 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5413 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005414 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005415 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5416 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5417 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005418 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005419 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005420 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005421 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005422 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005423 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005424 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005425 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005426 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005427 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005428 }
5429}
5430
5431
// Saves a core register so it can be used as a scratch, by pushing it.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5435
5436
// Restores a previously spilled scratch register, by popping it.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5440
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005441void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07005442 SlowPathCode* slow_path, CpuRegister class_reg) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005443 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
5444 Immediate(mirror::Class::kStatusInitialized));
5445 __ j(kLess, slow_path->GetEntryLabel());
5446 __ Bind(slow_path->GetExitLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005447 // No need for memory fence, thanks to the x86-64 memory model.
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005448}
5449
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005450HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5451 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005452 switch (desired_class_load_kind) {
5453 case HLoadClass::LoadKind::kReferrersClass:
5454 break;
5455 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5456 DCHECK(!GetCompilerOptions().GetCompilePic());
5457 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5458 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5459 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5460 DCHECK(GetCompilerOptions().GetCompilePic());
5461 break;
5462 case HLoadClass::LoadKind::kBootImageAddress:
5463 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005464 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005465 break;
5466 case HLoadClass::LoadKind::kDexCachePcRelative:
5467 DCHECK(!Runtime::Current()->UseJitCompilation());
5468 break;
5469 case HLoadClass::LoadKind::kDexCacheViaMethod:
5470 break;
5471 }
5472 return desired_class_load_kind;
5473}
5474
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005475void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005476 if (cls->NeedsAccessCheck()) {
5477 InvokeRuntimeCallingConvention calling_convention;
5478 CodeGenerator::CreateLoadClassLocationSummary(
5479 cls,
5480 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5481 Location::RegisterLocation(RAX),
5482 /* code_generator_supports_read_barrier */ true);
5483 return;
5484 }
5485
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005486 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5487 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005488 ? LocationSummary::kCallOnSlowPath
5489 : LocationSummary::kNoCall;
5490 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005491 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005492 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005493 }
5494
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005495 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5496 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5497 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5498 locations->SetInAt(0, Location::RequiresRegister());
5499 }
5500 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005501}
5502
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005503Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
5504 dex::TypeIndex dex_index,
5505 uint64_t address) {
5506 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index), address);
5507 // Add a patch entry and return the label.
5508 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
5509 PatchInfo<Label>* info = &jit_class_patches_.back();
5510 return &info->label;
5511}
5512
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005513void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005514 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01005515 if (cls->NeedsAccessCheck()) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08005516 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
Serban Constantinescuba45db02016-07-12 22:53:02 +01005517 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005518 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01005519 return;
5520 }
5521
Roland Levillain0d5a2812015-11-13 10:07:31 +00005522 Location out_loc = locations->Out();
5523 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005524
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005525 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
5526 ? kWithoutReadBarrier
5527 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005528 bool generate_null_check = false;
5529 switch (cls->GetLoadKind()) {
5530 case HLoadClass::LoadKind::kReferrersClass: {
5531 DCHECK(!cls->CanCallRuntime());
5532 DCHECK(!cls->MustGenerateClinitCheck());
5533 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5534 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5535 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005536 cls,
5537 out_loc,
5538 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Roland Levillain00468f32016-10-27 18:02:48 +01005539 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005540 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005541 break;
5542 }
5543 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005544 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005545 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5546 codegen_->RecordTypePatch(cls);
5547 break;
5548 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005549 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005550 DCHECK_NE(cls->GetAddress(), 0u);
5551 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
5552 __ movl(out, Immediate(address)); // Zero-extended.
5553 codegen_->RecordSimplePatch();
5554 break;
5555 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005556 case HLoadClass::LoadKind::kJitTableAddress: {
5557 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5558 /* no_rip */ true);
5559 Label* fixup_label =
5560 codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetAddress());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005561 // /* GcRoot<mirror::Class> */ out = *address
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005562 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005563 break;
5564 }
5565 case HLoadClass::LoadKind::kDexCachePcRelative: {
5566 uint32_t offset = cls->GetDexCacheElementOffset();
5567 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
5568 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5569 /* no_rip */ false);
5570 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005571 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005572 generate_null_check = !cls->IsInDexCache();
5573 break;
5574 }
5575 case HLoadClass::LoadKind::kDexCacheViaMethod: {
5576 // /* GcRoot<mirror::Class>[] */ out =
5577 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
5578 CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
5579 __ movq(out,
5580 Address(current_method,
5581 ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
5582 // /* GcRoot<mirror::Class> */ out = out[type_index]
5583 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005584 cls,
5585 out_loc,
Andreas Gampea5b09a62016-11-17 15:21:22 -08005586 Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
Roland Levillain00468f32016-10-27 18:02:48 +01005587 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005588 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005589 generate_null_check = !cls->IsInDexCache();
5590 break;
5591 }
5592 default:
5593 LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
5594 UNREACHABLE();
5595 }
5596
5597 if (generate_null_check || cls->MustGenerateClinitCheck()) {
5598 DCHECK(cls->CanCallRuntime());
5599 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
5600 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
5601 codegen_->AddSlowPath(slow_path);
5602 if (generate_null_check) {
5603 __ testl(out, out);
5604 __ j(kEqual, slow_path->GetEntryLabel());
5605 }
5606 if (cls->MustGenerateClinitCheck()) {
5607 GenerateClassInitializationCheck(slow_path, out);
5608 } else {
5609 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005610 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005611 }
5612}
5613
// Location constraints for an explicit class-initialization check: the class
// comes in a register; when the check has uses, it forwards the class as its
// own result in the same register.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
5622
// Emits the class-initialization check itself; initialization, if still
// needed, happens on the LoadClassSlowPathX86_64 slow path.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
5631
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005632HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5633 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005634 switch (desired_string_load_kind) {
5635 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5636 DCHECK(!GetCompilerOptions().GetCompilePic());
5637 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5638 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5639 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5640 DCHECK(GetCompilerOptions().GetCompilePic());
5641 break;
5642 case HLoadString::LoadKind::kBootImageAddress:
5643 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005644 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005645 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005646 break;
5647 case HLoadString::LoadKind::kDexCacheViaMethod:
5648 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005649 case HLoadString::LoadKind::kJitTableAddress:
5650 DCHECK(Runtime::Current()->UseJitCompilation());
5651 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005652 }
5653 return desired_string_load_kind;
5654}
5655
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005656void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005657 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005658 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005659 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005660 locations->SetOut(Location::RegisterLocation(RAX));
5661 } else {
5662 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005663 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5664 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5665 // Rely on the pResolveString and/or marking to save everything.
5666 // Custom calling convention: RAX serves as both input and output.
5667 RegisterSet caller_saves = RegisterSet::Empty();
5668 caller_saves.Add(Location::RegisterLocation(RAX));
5669 locations->SetCustomSlowPathCallerSaves(caller_saves);
5670 } else {
5671 // For non-Baker read barrier we have a temp-clobbering call.
5672 }
5673 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005674 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005675}
5676
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005677Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
5678 dex::StringIndex dex_index) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005679 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
5680 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005681 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005682 PatchInfo<Label>* info = &jit_string_patches_.back();
5683 return &info->label;
5684}
5685
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005686void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005687 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005688 Location out_loc = locations->Out();
5689 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005690
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005691 switch (load->GetLoadKind()) {
5692 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005693 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
Vladimir Markoaad75c62016-10-03 08:46:48 +00005694 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005695 return; // No dex cache slow path.
5696 }
5697 case HLoadString::LoadKind::kBootImageAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005698 DCHECK_NE(load->GetAddress(), 0u);
5699 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5700 __ movl(out, Immediate(address)); // Zero-extended.
5701 codegen_->RecordSimplePatch();
5702 return; // No dex cache slow path.
5703 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00005704 case HLoadString::LoadKind::kBssEntry: {
5705 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5706 /* no_rip */ false);
5707 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
5708 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005709 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005710 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
5711 codegen_->AddSlowPath(slow_path);
5712 __ testl(out, out);
5713 __ j(kEqual, slow_path->GetEntryLabel());
5714 __ Bind(slow_path->GetExitLabel());
5715 return;
5716 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005717 case HLoadString::LoadKind::kJitTableAddress: {
5718 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
5719 /* no_rip */ true);
5720 Label* fixup_label =
5721 codegen_->NewJitRootStringPatch(load->GetDexFile(), load->GetStringIndex());
5722 // /* GcRoot<mirror::String> */ out = *address
5723 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
5724 return;
5725 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005726 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005727 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005728 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005729
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005730 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005731 // Custom calling convention: RAX serves as both input and output.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005732 __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005733 codegen_->InvokeRuntime(kQuickResolveString,
5734 load,
5735 load->GetDexPc());
5736 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005737}
5738
David Brazdilcb1c0552015-08-04 16:22:25 +01005739static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005740 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005741 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005742}
5743
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005744void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5745 LocationSummary* locations =
5746 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5747 locations->SetOut(Location::RequiresRegister());
5748}
5749
// Reads the pending exception from the thread-local slot (GS-relative).
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
5753
// Clearing the exception needs no registers and makes no call.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5757
// Clears the pending exception by storing zero into the thread-local slot.
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5761
// Throwing is a main-path runtime call; the exception object goes in the
// first runtime-call argument register.
void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
5768
// Delegates the actual throw to the pDeliverException runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5773
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005774static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5775 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005776 // We need a temporary for holding the iftable length.
5777 return true;
5778 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005779 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005780 !kUseBakerReadBarrier &&
5781 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005782 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5783 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5784}
5785
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005786static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5787 return kEmitCompilerReadBarrier &&
5788 !kUseBakerReadBarrier &&
5789 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5790 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5791 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5792}
5793
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005794void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005795 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005796 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005797 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005798 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005799 case TypeCheckKind::kExactCheck:
5800 case TypeCheckKind::kAbstractClassCheck:
5801 case TypeCheckKind::kClassHierarchyCheck:
5802 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005803 call_kind =
5804 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005805 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005806 break;
5807 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005808 case TypeCheckKind::kUnresolvedCheck:
5809 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005810 call_kind = LocationSummary::kCallOnSlowPath;
5811 break;
5812 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005813
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005814 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005815 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005816 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005817 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005818 locations->SetInAt(0, Location::RequiresRegister());
5819 locations->SetInAt(1, Location::Any());
5820 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5821 locations->SetOut(Location::RequiresRegister());
5822 // When read barriers are enabled, we need a temporary register for
5823 // some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005824 if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005825 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005826 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005827}
5828
// Generates code for HInstanceOf: sets the "out" register to 1 when `obj` is
// an instance of the class held in `cls`, and to 0 otherwise. A null `obj`
// yields 0; the null check is elided when the compiler proved non-nullness
// (MustDoNullCheck() is false). Simple kinds are resolved inline; unresolved
// and interface checks always branch to TypeCheckSlowPathX86_64.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  // The temp is only allocated for the kinds reported by
  // InstanceOfTypeCheckNeedsATemporary (see the locations builder).
  Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        // No null check was emitted, so the result can be materialized
        // branchlessly from the flags.
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      // A primitive component type means this is not an Object array.
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }
  }

  // Shared tail: materialize the "false" result if any branch targeted `zero`.
  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6055
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006056static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006057 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006058 case TypeCheckKind::kExactCheck:
6059 case TypeCheckKind::kAbstractClassCheck:
6060 case TypeCheckKind::kClassHierarchyCheck:
6061 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006062 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006063 case TypeCheckKind::kInterfaceCheck:
6064 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006065 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006066 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006067 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006068 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006069 LOG(FATAL) << "Unreachable";
6070 UNREACHABLE();
6071}
6072
6073void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6074 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6075 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6076 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6077 LocationSummary::CallKind call_kind = is_fatal_slow_path
6078 ? LocationSummary::kNoCall
6079 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006080 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6081 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006082 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6083 // Require a register for the interface check since there is a loop that compares the class to
6084 // a memory address.
6085 locations->SetInAt(1, Location::RequiresRegister());
6086 } else {
6087 locations->SetInAt(1, Location::Any());
6088 }
6089
Roland Levillain0d5a2812015-11-13 10:07:31 +00006090 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6091 locations->AddTemp(Location::RequiresRegister());
6092 // When read barriers are enabled, we need an additional temporary
6093 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006094 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006095 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006096 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006097}
6098
// Generates code for HCheckCast: execution falls through when `obj` is null or
// passes the type check; failing (or inconclusive) checks branch to
// TypeCheckSlowPathX86_64. The fast paths deliberately skip read barriers (see
// comment below), which is why the slow path is never fatal under read
// barriers.
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // The second temp is only allocated for the kinds reported by
  // CheckCastTypeCheckNeedsATemporary (see the locations builder).
  Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal =
      IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);


  NearLabel done;
  // A null object trivially passes the cast.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Keep climbing the hierarchy until a match is found (fall through) or
      // the chain is exhausted (null check above).
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is null (i.e. the object is not an array),
      // jump to the slow path to throw the exception. Otherwise fall through
      // to verify that the component type is not a primitive type.
      __ testl(temp, temp);
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck:
      // Fast path for the interface check. We always go slow path for heap poisoning since
      // unpoisoning cls would require an extra temp.
      if (!kPoisonHeapReferences) {
        // Try to avoid read barriers to improve the fast path. We can not get false positives by
        // doing this.
        // /* HeapReference<Class> */ temp = obj->klass_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          obj_loc,
                                          class_offset,
                                          kWithoutReadBarrier);

        // /* HeapReference<Class> */ temp = temp->iftable_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          temp_loc,
                                          iftable_offset,
                                          kWithoutReadBarrier);
        // Iftable is never null.
        __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
        // Loop through the iftable and check if any class matches.
        NearLabel start_loop;
        __ Bind(&start_loop);
        // Need to subtract first to handle the empty array case.
        __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
        __ j(kNegative, type_check_slow_path->GetEntryLabel());
        // Go to next interface if the classes do not match.
        __ cmpl(cls.AsRegister<CpuRegister>(),
                CodeGeneratorX86_64::ArrayAddress(temp,
                                                  maybe_temp2_loc,
                                                  TIMES_4,
                                                  object_array_data_offset));
        __ j(kNotEqual, &start_loop);  // Fall through on a matching interface.
      } else {
        __ jmp(type_check_slow_path->GetEntryLabel());
      }
      break;
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
6322
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006323void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6324 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006325 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006326 InvokeRuntimeCallingConvention calling_convention;
6327 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6328}
6329
6330void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006331 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006332 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006333 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006334 if (instruction->IsEnter()) {
6335 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6336 } else {
6337 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6338 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006339}
6340
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006341void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6342void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6343void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6344
6345void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6346 LocationSummary* locations =
6347 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6348 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6349 || instruction->GetResultType() == Primitive::kPrimLong);
6350 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006351 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006352 locations->SetOut(Location::SameAsFirstInput());
6353}
6354
6355void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6356 HandleBitwiseOperation(instruction);
6357}
6358
6359void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6360 HandleBitwiseOperation(instruction);
6361}
6362
6363void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6364 HandleBitwiseOperation(instruction);
6365}
6366
// Emits code for And/Or/Xor on int or long operands. The result is produced
// in place in the first operand's register (x86 two-address form); the
// locations builder has already guaranteed out == InAt(0).
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // 32-bit case: the second operand may be a register, an immediate
    // constant, or a stack slot (memory operand).
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      // Any 32-bit constant fits in an x86 immediate.
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Operand lives on the stack; operate on it as a memory operand.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // x86-64 immediates are at most 32 bits (sign-extended); wider constants
    // are instead loaded from the RIP-relative constant area.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6455
// Loads a heap reference located at `*(out + offset)` back into `out`,
// emitting a read barrier when requested. `maybe_temp` is only consulted on
// the non-Baker read-barrier path, where the original value of `out` must be
// preserved for the slow-path call.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6488
// Loads the heap reference at `*(obj + offset)` into `out` (obj is left
// untouched), emitting a read barrier when requested. Unlike the
// one-register variant, no temporary is needed: `obj` itself survives and
// can be handed to the slow path.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6517
// Loads a GC root from `address` into `root`, with an optional read barrier.
// If `fixup_label` is non-null it is bound immediately after the load/lea so
// the instruction's displacement can be patched later (presumably for
// PC-relative dex-cache/string roots — confirm against callers).
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // A GcRoot is stored as a 32-bit compressed reference, so a 32-bit
      // load above is the right width; check that statically.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Branch to the mark slow path only while the GC is marking; the flag
      // is read thread-locally through the GS segment.
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6581
// Baker read barrier for a field load: delegate to the common reference-load
// path with the field address `obj + offset`.
void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t offset,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction, ref, obj, Address(obj, offset), needs_null_check);
}
6594
// Baker read barrier for an array element load: delegate to the common
// reference-load path with the element address
// `obj + data_offset + index * sizeof(HeapReference<Object>)`.
void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                CpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // The TIMES_4 scale below relies on heap references being 32-bit.
  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(
      instruction,
      ref,
      obj,
      CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset),
      needs_null_check);
}
6612
// Core Baker read barrier sequence for a reference load from `src`.
// When `always_update_field` is true, the slow path also writes the marked
// reference back to the field (`temp1`/`temp2` must then be provided).
// NOTE: the statement order below is deliberate — the gray-bit test sets the
// CPU flags, which must survive untouched until the conditional branch.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to `obj`, so it can double as the
    // implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6696
// Emits an unconditional slow-path read barrier for a heap reference that
// has already been loaded into `out`.
void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* barrier_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
  AddSlowPath(barrier_path);

  // Unconditionally enter the slow path; it jumps back to the exit label.
  __ jmp(barrier_path->GetEntryLabel());
  __ Bind(barrier_path->GetExitLabel());
}
6723
// Emits a slow-path read barrier if read barriers are enabled; otherwise only
// unpoisons `out` when heap poisoning is on.
void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (!kEmitCompilerReadBarrier) {
    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
    }
    return;
  }
  // Baker's read barriers shall be handled by the fast path
  // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
  DCHECK(!kUseBakerReadBarrier);
  // If heap poisoning is enabled, unpoisoning will be taken care of
  // by the runtime within the slow path.
  GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
}
6741
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006742void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6743 Location out,
6744 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006745 DCHECK(kEmitCompilerReadBarrier);
6746
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006747 // Insert a slow path based read barrier *after* the GC root load.
6748 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006749 // Note that GC roots are not affected by heap poisoning, so we do
6750 // not need to do anything special for this here.
6751 SlowPathCode* slow_path =
6752 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6753 AddSlowPath(slow_path);
6754
Roland Levillain0d5a2812015-11-13 10:07:31 +00006755 __ jmp(slow_path->GetEntryLabel());
6756 __ Bind(slow_path->GetExitLabel());
6757}
6758
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching here means an HBoundType survived to code generation, which is a
  // compiler bug.
  LOG(FATAL) << "Unreachable";
}
6763
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching here means an HBoundType survived to code generation, which is a
  // compiler bug.
  LOG(FATAL) << "Unreachable";
}
6768
Mark Mendellfe57faa2015-09-18 09:26:15 -04006769// Simple implementation of packed switch - generate cascaded compare/jumps.
6770void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6771 LocationSummary* locations =
6772 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6773 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006774 locations->AddTemp(Location::RequiresRegister());
6775 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006776}
6777
// Emits a packed switch either as a cascade of compare/jumps (few entries)
// or as a RIP-relative jump table stored in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // With a non-zero bias, a signed compare against the lower bound both
      // filters out-of-range values (below) and matches the first case.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // With a zero bias an unsigned "below" compare handles negatives too.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each compare dispatches two cases: `<` hits case `index`,
    // `==` hits case `index + 1`.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table path.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? Unsigned compare also rejects negative values.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6858
// Materializes a 32-bit constant; xor is the shortest encoding for zero.
void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
  if (value != 0) {
    __ movl(dest, Immediate(value));
    return;
  }
  __ xorl(dest, dest);
}
6866
// Materializes a 64-bit constant, picking the shortest encoding:
// xor for zero, a zero-extending 32-bit move when the value fits unsigned
// 32 bits, and a full 64-bit immediate move otherwise.
void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
  if (value == 0) {
    // Clears upper bits too.
    __ xorl(dest, dest);
    return;
  }
  if (IsUint<32>(value)) {
    // We can use a 32 bit move, as it will zero-extend and is shorter.
    __ movl(dest, Immediate(static_cast<int32_t>(value)));
    return;
  }
  __ movq(dest, Immediate(value));
}
6878
// Materializes a 32-bit constant into an XMM register; zero is cheap via
// xorps, anything else is loaded from the constant area.
void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
  if (value != 0) {
    __ movss(dest, LiteralInt32Address(value));
    return;
  }
  __ xorps(dest, dest);
}
6886
// Materializes a 64-bit constant into an XMM register; zero is cheap via
// xorpd, anything else is loaded from the constant area.
void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
  if (value != 0) {
    __ movsd(dest, LiteralInt64Address(value));
    return;
  }
  __ xorpd(dest, dest);
}
6894
void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
  // Reinterpret the float's bits as an integer and reuse the integer path.
  const int32_t bits = bit_cast<int32_t, float>(value);
  Load32BitValue(dest, bits);
}
6898
void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
  // Reinterpret the double's bits as an integer and reuse the integer path.
  const int64_t bits = bit_cast<int64_t, double>(value);
  Load64BitValue(dest, bits);
}
6902
// Compares a register against a 32-bit constant; test-against-self is the
// shorter encoding for a comparison with zero.
void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
  if (value != 0) {
    __ cmpl(dest, Immediate(value));
    return;
  }
  __ testl(dest, dest);
}
6910
// Compares a register against a 64-bit constant. x86-64 cmpq immediates are
// limited to sign-extended 32 bits; wider constants are compared against a
// memory operand in the constant area.
void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
  if (!IsInt<32>(value)) {
    // Value won't fit in an int.
    __ cmpq(dest, LiteralInt64Address(value));
    return;
  }
  if (value == 0) {
    __ testq(dest, dest);  // Shorter encoding than cmpq with 0.
  } else {
    __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
  }
}
6923
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006924void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6925 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006926 GenerateIntCompare(lhs_reg, rhs);
6927}
6928
// Emits a 32-bit compare of `lhs` against a constant, stack slot or register.
void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
  if (rhs.IsConstant()) {
    Compare32BitValue(lhs, CodeGenerator::GetInt32ValueOf(rhs.GetConstant()));
  } else if (rhs.IsStackSlot()) {
    __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
  } else {
    __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
  }
}
6939
// Emits a 64-bit compare of `lhs` against a constant, stack slot or register.
void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
  CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
  if (rhs.IsConstant()) {
    Compare64BitValue(lhs_reg, rhs.GetConstant()->AsLongConstant()->GetValue());
  } else if (rhs.IsDoubleStackSlot()) {
    __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
  } else {
    __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
  }
}
6951
// Builds the address of an array element: a constant index is folded into the
// displacement; otherwise a scaled index register is used.
Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
                                          Location index,
                                          ScaleFactor scale,
                                          uint32_t data_offset) {
  if (index.IsConstant()) {
    return Address(obj,
                   (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset);
  }
  return Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
}
6960
// Stores a 64-bit constant to a double stack slot. Small values use movq's
// sign-extended 32-bit immediate; larger values are staged through TMP.
void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
  DCHECK(dest.IsDoubleStackSlot());
  Address slot(CpuRegister(RSP), dest.GetStackIndex());
  if (IsInt<32>(value)) {
    // Can move directly as an int32 constant.
    __ movq(slot, Immediate(static_cast<int32_t>(value)));
  } else {
    Load64BitValue(CpuRegister(TMP), value);
    __ movq(slot, CpuRegister(TMP));
  }
}
6972
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * Emitted RIP-relative operands reference the constant area before its final
 * position is known; each fixup records the offset within the constant area
 * and patches the instruction's 32-bit displacement once layout is final.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Allows subclasses (e.g. jump-table fixups) to set the offset once the
  // constant-area slot is actually allocated.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
7000
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      // The real offset is unknown until CreateJumpTable runs; -1 is a
      // placeholder replaced via SetOffset below.
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Emits the jump table into the assembler's constant area and records
  // its offset.  Called from Finalize, after all blocks are bound.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The switch instruction whose successors define the table entries.
  const HPackedSwitch* switch_instr_;
};
7037
Mark Mendellf55c3e02015-03-26 21:07:46 -04007038void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7039 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007040 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007041 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7042 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007043 assembler->Align(4, 0);
7044 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007045
7046 // Populate any jump tables.
7047 for (auto jump_table : fixups_to_jump_tables_) {
7048 jump_table->CreateJumpTable();
7049 }
7050
7051 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007052 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007053 }
7054
7055 // And finish up.
7056 CodeGenerator::Finalize(allocator);
7057}
7058
Mark Mendellf55c3e02015-03-26 21:07:46 -04007059Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7060 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7061 return Address::RIP(fixup);
7062}
7063
7064Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7065 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7066 return Address::RIP(fixup);
7067}
7068
7069Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7070 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7071 return Address::RIP(fixup);
7072}
7073
7074Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7075 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7076 return Address::RIP(fixup);
7077}
7078
Andreas Gampe85b62f22015-09-09 13:15:38 -07007079// TODO: trg as memory.
7080void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7081 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007082 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007083 return;
7084 }
7085
7086 DCHECK_NE(type, Primitive::kPrimVoid);
7087
7088 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7089 if (trg.Equals(return_loc)) {
7090 return;
7091 }
7092
7093 // Let the parallel move resolver take care of all of this.
7094 HParallelMove parallel_move(GetGraph()->GetArena());
7095 parallel_move.AddMove(return_loc, trg, type, nullptr);
7096 GetMoveResolver()->EmitNativeCode(&parallel_move);
7097}
7098
Mark Mendell9c86b482015-09-18 13:36:07 -04007099Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7100 // Create a fixup to be used to create and address the jump table.
7101 JumpTableRIPFixup* table_fixup =
7102 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7103
7104 // We have to populate the jump tables.
7105 fixups_to_jump_tables_.push_back(table_fixup);
7106 return Address::RIP(table_fixup);
7107}
7108
Mark Mendellea5af682015-10-22 17:35:49 -04007109void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7110 const Address& addr_high,
7111 int64_t v,
7112 HInstruction* instruction) {
7113 if (IsInt<32>(v)) {
7114 int32_t v_32 = v;
7115 __ movq(addr_low, Immediate(v_32));
7116 MaybeRecordImplicitNullCheck(instruction);
7117 } else {
7118 // Didn't fit in a register. Do it in pieces.
7119 int32_t low_v = Low32Bits(v);
7120 int32_t high_v = High32Bits(v);
7121 __ movl(addr_low, Immediate(low_v));
7122 MaybeRecordImplicitNullCheck(instruction);
7123 __ movl(addr_high, Immediate(high_v));
7124 }
7125}
7126
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007127void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
7128 const uint8_t* roots_data,
7129 const PatchInfo<Label>& info,
7130 uint64_t index_in_table) const {
7131 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7132 uintptr_t address =
7133 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
7134 typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
7135 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7136 dchecked_integral_cast<uint32_t>(address);
7137}
7138
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007139void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7140 for (const PatchInfo<Label>& info : jit_string_patches_) {
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007141 const auto& it = jit_string_roots_.find(
7142 StringReference(&info.dex_file, dex::StringIndex(info.index)));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007143 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007144 PatchJitRootUse(code, roots_data, info, it->second);
7145 }
7146
7147 for (const PatchInfo<Label>& info : jit_class_patches_) {
7148 const auto& it = jit_class_roots_.find(
7149 TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
7150 DCHECK(it != jit_class_roots_.end());
7151 PatchJitRootUse(code, roots_data, info, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007152 }
7153}
7154
Roland Levillain4d027112015-07-01 15:41:14 +01007155#undef __
7156
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007157} // namespace x86_64
7158} // namespace art