blob: 261473505f92891713e62493b844ec5fd4a2d72d [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010042static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010043static constexpr Register kMethodRegisterArgument = RDI;
Vladimir Markof3e0ee22015-12-17 15:23:13 +000044// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
45// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
46// generates less code/data with a small num_entries.
47static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010048
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +000049static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000050static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010051
Mark Mendell24f2dfa2015-01-14 19:51:45 -050052static constexpr int kC2ConditionMask = 0x400;
53
Roland Levillain7cbd27f2016-08-11 23:53:33 +010054// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
55#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070056#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
Andreas Gampe85b62f22015-09-09 13:15:38 -0700103class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000104 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000105 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
106 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000107
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000108 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Calin Juravled0d48522014-11-04 16:40:20 +0000109 __ Bind(GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000110 if (type_ == Primitive::kPrimInt) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000111 if (is_div_) {
112 __ negl(cpu_reg_);
113 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400114 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000115 }
116
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000117 } else {
118 DCHECK_EQ(Primitive::kPrimLong, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000119 if (is_div_) {
120 __ negq(cpu_reg_);
121 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400122 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000123 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000124 }
Calin Juravled0d48522014-11-04 16:40:20 +0000125 __ jmp(GetExitLabel());
126 }
127
Alexandre Rames9931f312015-06-19 14:47:01 +0100128 const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }
129
Calin Juravled0d48522014-11-04 16:40:20 +0000130 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000131 const CpuRegister cpu_reg_;
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000132 const Primitive::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 const bool is_div_;
134 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000135};
136
Andreas Gampe85b62f22015-09-09 13:15:38 -0700137class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000138 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100139 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000140 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000141
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000143 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000144 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100145 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000146 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100147 if (successor_ == nullptr) {
148 __ jmp(GetReturnLabel());
149 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000150 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100151 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 }
153
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 Label* GetReturnLabel() {
155 DCHECK(successor_ == nullptr);
156 return &return_label_;
157 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000158
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100159 HBasicBlock* GetSuccessor() const {
160 return successor_;
161 }
162
Alexandre Rames9931f312015-06-19 14:47:01 +0100163 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
164
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000165 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100166 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000167 Label return_label_;
168
169 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
170};
171
Andreas Gampe85b62f22015-09-09 13:15:38 -0700172class BoundsCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100173 public:
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100174 explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000175 : SlowPathCode(instruction) {}
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100176
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000177 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100178 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000179 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100180 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000181 if (instruction_->CanThrowIntoCatchBlock()) {
182 // Live registers will be restored in the catch block if caught.
183 SaveLiveRegisters(codegen, instruction_->GetLocations());
184 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400185 // Are we using an array length from memory?
186 HInstruction* array_length = instruction_->InputAt(1);
187 Location length_loc = locations->InAt(1);
188 InvokeRuntimeCallingConvention calling_convention;
189 if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
190 // Load the array length into our temporary.
191 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
192 Location array_loc = array_length->GetLocations()->InAt(0);
193 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
194 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
195 // Check for conflicts with index.
196 if (length_loc.Equals(locations->InAt(0))) {
197 // We know we aren't using parameter 2.
198 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
199 }
200 __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
jessicahandojo4877b792016-09-08 19:49:13 -0700201 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +0100202 __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -0700203 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400204 }
205
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000206 // We're moving two locations to locations that could overlap, so we need a parallel
207 // move resolver.
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000208 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100209 locations->InAt(0),
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000210 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Nicolas Geoffray90218252015-04-15 11:56:51 +0100211 Primitive::kPrimInt,
Mark Mendellee8d9712016-07-12 11:13:15 -0400212 length_loc,
Nicolas Geoffray90218252015-04-15 11:56:51 +0100213 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
214 Primitive::kPrimInt);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100215 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
216 ? kQuickThrowStringBounds
217 : kQuickThrowArrayBounds;
218 x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100219 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Roland Levillain888d0672015-11-23 18:53:50 +0000220 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100221 }
222
Alexandre Rames8158f282015-08-07 10:26:17 +0100223 bool IsFatal() const OVERRIDE { return true; }
224
Alexandre Rames9931f312015-06-19 14:47:01 +0100225 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }
226
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100227 private:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100228 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
229};
230
Andreas Gampe85b62f22015-09-09 13:15:38 -0700231class LoadClassSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100232 public:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000233 LoadClassSlowPathX86_64(HLoadClass* cls,
234 HInstruction* at,
235 uint32_t dex_pc,
236 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000237 : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000238 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
239 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100240
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000241 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000242 LocationSummary* locations = at_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000243 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100244 __ Bind(GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100245
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000246 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000247
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100248 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -0800249 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
250 Immediate(cls_->GetTypeIndex().index_));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100251 x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000252 at_,
253 dex_pc_,
254 this);
Roland Levillain888d0672015-11-23 18:53:50 +0000255 if (do_clinit_) {
256 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
257 } else {
258 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
259 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100260
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000261 Location out = locations->Out();
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000262 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000263 if (out.IsValid()) {
264 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Roland Levillain0d5a2812015-11-13 10:07:31 +0000265 x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000266 }
267
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000268 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100269 __ jmp(GetExitLabel());
270 }
271
Alexandre Rames9931f312015-06-19 14:47:01 +0100272 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }
273
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100274 private:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000275 // The class this slow path will load.
276 HLoadClass* const cls_;
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100277
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000278 // The instruction where this slow path is happening.
279 // (Might be the load class or an initialization check).
280 HInstruction* const at_;
281
282 // The dex PC of `at_`.
283 const uint32_t dex_pc_;
284
285 // Whether to initialize the class.
286 const bool do_clinit_;
287
288 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100289};
290
Vladimir Markoaad75c62016-10-03 08:46:48 +0000291class LoadStringSlowPathX86_64 : public SlowPathCode {
292 public:
293 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
294
295 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
296 LocationSummary* locations = instruction_->GetLocations();
297 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
298
299 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
300 __ Bind(GetEntryLabel());
301 SaveLiveRegisters(codegen, locations);
302
Andreas Gampe8a0128a2016-11-28 07:38:35 -0800303 const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100304 // Custom calling convention: RAX serves as both input and output.
305 __ movl(CpuRegister(RAX), Immediate(string_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000306 x86_64_codegen->InvokeRuntime(kQuickResolveString,
307 instruction_,
308 instruction_->GetDexPc(),
309 this);
310 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
311 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
312 RestoreLiveRegisters(codegen, locations);
313
314 // Store the resolved String to the BSS entry.
315 __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
316 locations->Out().AsRegister<CpuRegister>());
317 Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
318 __ Bind(fixup_label);
319
320 __ jmp(GetExitLabel());
321 }
322
323 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }
324
325 private:
326 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
327};
328
Andreas Gampe85b62f22015-09-09 13:15:38 -0700329class TypeCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000330 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000331 TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
David Srbecky9cd6d372016-02-09 15:24:47 +0000332 : SlowPathCode(instruction), is_fatal_(is_fatal) {}
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000333
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000334 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000335 LocationSummary* locations = instruction_->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100336 uint32_t dex_pc = instruction_->GetDexPc();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000337 DCHECK(instruction_->IsCheckCast()
338 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000339
Roland Levillain0d5a2812015-11-13 10:07:31 +0000340 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000341 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000342
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000343 if (!is_fatal_) {
344 SaveLiveRegisters(codegen, locations);
345 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000346
347 // We're moving two locations to locations that could overlap, so we need a parallel
348 // move resolver.
349 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800350 codegen->EmitParallelMoves(locations->InAt(0),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800351 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
352 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800353 locations->InAt(1),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800354 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
355 Primitive::kPrimNot);
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000356 if (instruction_->IsInstanceOf()) {
Serban Constantinescuba45db02016-07-12 22:53:02 +0100357 x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800358 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000359 } else {
360 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800361 x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
362 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000363 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000364
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000365 if (!is_fatal_) {
366 if (instruction_->IsInstanceOf()) {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000367 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000368 }
Nicolas Geoffray75374372015-09-17 17:12:19 +0000369
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000370 RestoreLiveRegisters(codegen, locations);
371 __ jmp(GetExitLabel());
372 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000373 }
374
Alexandre Rames9931f312015-06-19 14:47:01 +0100375 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }
376
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000377 bool IsFatal() const OVERRIDE { return is_fatal_; }
378
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000379 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000380 const bool is_fatal_;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000381
382 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
383};
384
Andreas Gampe85b62f22015-09-09 13:15:38 -0700385class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700386 public:
Aart Bik42249c32016-01-07 15:33:50 -0800387 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000388 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700389
390 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000391 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700392 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100393 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000394 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700395 }
396
Alexandre Rames9931f312015-06-19 14:47:01 +0100397 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
398
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700400 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
401};
402
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100403class ArraySetSlowPathX86_64 : public SlowPathCode {
404 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000405 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100406
407 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
408 LocationSummary* locations = instruction_->GetLocations();
409 __ Bind(GetEntryLabel());
410 SaveLiveRegisters(codegen, locations);
411
412 InvokeRuntimeCallingConvention calling_convention;
413 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
414 parallel_move.AddMove(
415 locations->InAt(0),
416 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
417 Primitive::kPrimNot,
418 nullptr);
419 parallel_move.AddMove(
420 locations->InAt(1),
421 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
422 Primitive::kPrimInt,
423 nullptr);
424 parallel_move.AddMove(
425 locations->InAt(2),
426 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
427 Primitive::kPrimNot,
428 nullptr);
429 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
430
Roland Levillain0d5a2812015-11-13 10:07:31 +0000431 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100432 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000433 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100434 RestoreLiveRegisters(codegen, locations);
435 __ jmp(GetExitLabel());
436 }
437
438 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
439
440 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100441 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
442};
443
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100444// Slow path marking an object reference `ref` during a read
445// barrier. The field `obj.field` in the object `obj` holding this
446// reference does not get updated by this slow path after marking (see
447// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
448//
449// This means that after the execution of this slow path, `ref` will
450// always be up-to-date, but `obj.field` may not; i.e., after the
451// flip, `ref` will be a to-space reference, but `obj.field` will
452// probably still be a from-space reference (unless it gets updated by
453// another thread, or if another thread installed another object
454// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000455class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
456 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100457 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
458 Location ref,
459 bool unpoison_ref_before_marking)
460 : SlowPathCode(instruction),
461 ref_(ref),
462 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000463 DCHECK(kEmitCompilerReadBarrier);
464 }
465
466 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }
467
468 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
469 LocationSummary* locations = instruction_->GetLocations();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100470 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
471 Register ref_reg = ref_cpu_reg.AsRegister();
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000472 DCHECK(locations->CanCall());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100473 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000474 DCHECK(instruction_->IsInstanceFieldGet() ||
475 instruction_->IsStaticFieldGet() ||
476 instruction_->IsArrayGet() ||
Roland Levillain16d9f942016-08-25 17:27:56 +0100477 instruction_->IsArraySet() ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000478 instruction_->IsLoadClass() ||
479 instruction_->IsLoadString() ||
480 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100481 instruction_->IsCheckCast() ||
Roland Levillain0b671c02016-08-19 12:02:34 +0100482 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
483 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000484 << "Unexpected instruction in read barrier marking slow path: "
485 << instruction_->DebugName();
486
487 __ Bind(GetEntryLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100488 if (unpoison_ref_before_marking_) {
Vladimir Marko953437b2016-08-24 08:30:46 +0000489 // Object* ref = ref_addr->AsMirrorPtr()
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100490 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
Vladimir Marko953437b2016-08-24 08:30:46 +0000491 }
Roland Levillain4359e612016-07-20 11:32:19 +0100492 // No need to save live registers; it's taken care of by the
493 // entrypoint. Also, there is no need to update the stack mask,
494 // as this runtime call will not trigger a garbage collection.
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000495 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100496 DCHECK_NE(ref_reg, RSP);
497 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
Roland Levillain02b75802016-07-13 11:54:35 +0100498 // "Compact" slow path, saving two moves.
499 //
500 // Instead of using the standard runtime calling convention (input
501 // and output in R0):
502 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100503 // RDI <- ref
Roland Levillain02b75802016-07-13 11:54:35 +0100504 // RAX <- ReadBarrierMark(RDI)
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100505 // ref <- RAX
Roland Levillain02b75802016-07-13 11:54:35 +0100506 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100507 // we just use rX (the register containing `ref`) as input and output
Roland Levillain02b75802016-07-13 11:54:35 +0100508 // of a dedicated entrypoint:
509 //
510 // rX <- ReadBarrierMarkRegX(rX)
511 //
512 int32_t entry_point_offset =
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100513 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
Roland Levillaindec8f632016-07-22 17:10:06 +0100514 // This runtime call does not require a stack map.
515 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000516 __ jmp(GetExitLabel());
517 }
518
519 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100520 // The location (register) of the marked object reference.
521 const Location ref_;
522 // Should the reference in `ref_` be unpoisoned prior to marking it?
523 const bool unpoison_ref_before_marking_;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000524
525 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
526};
527
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` is the (register) location of the reference to mark.
  // `obj` is the register holding the object that contains the field.
  // `field_addr` is the address of the reference field; its base must be `obj`.
  // `unpoison_ref_before_marking` requests unpoisoning of `ref` before
  // marking (only effective when heap poisoning is enabled).
  // `temp1` holds the pre-marking reference across the runtime call;
  // `temp2` preserves RAX across the LOCK CMPXCHG sequence below.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    // This slow path only makes sense when read barriers are compiled in.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference so it can be compared with
    // (and CASed against) the marked reference after the runtime call.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it will be overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch register holding the reference's pre-marking value.
  const CpuRegister temp1_;
  // Scratch register used to preserve RAX across the CAS sequence.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
699
Roland Levillain0d5a2812015-11-13 10:07:31 +0000700// Slow path generating a read barrier for a heap reference.
701class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
702 public:
703 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
704 Location out,
705 Location ref,
706 Location obj,
707 uint32_t offset,
708 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000709 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000710 out_(out),
711 ref_(ref),
712 obj_(obj),
713 offset_(offset),
714 index_(index) {
715 DCHECK(kEmitCompilerReadBarrier);
716 // If `obj` is equal to `out` or `ref`, it means the initial
717 // object has been overwritten by (or after) the heap object
718 // reference load to be instrumented, e.g.:
719 //
720 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000721 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000722 //
723 // In that case, we have lost the information about the original
724 // object, and the emitted read barrier cannot work properly.
725 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
726 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
727}
728
729 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
730 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
731 LocationSummary* locations = instruction_->GetLocations();
732 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
733 DCHECK(locations->CanCall());
734 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100735 DCHECK(instruction_->IsInstanceFieldGet() ||
736 instruction_->IsStaticFieldGet() ||
737 instruction_->IsArrayGet() ||
738 instruction_->IsInstanceOf() ||
739 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100740 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000741 << "Unexpected instruction in read barrier for heap reference slow path: "
742 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000743
744 __ Bind(GetEntryLabel());
745 SaveLiveRegisters(codegen, locations);
746
747 // We may have to change the index's value, but as `index_` is a
748 // constant member (like other "inputs" of this slow path),
749 // introduce a copy of it, `index`.
750 Location index = index_;
751 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100752 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000753 if (instruction_->IsArrayGet()) {
754 // Compute real offset and store it in index_.
755 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
756 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
757 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
758 // We are about to change the value of `index_reg` (see the
759 // calls to art::x86_64::X86_64Assembler::shll and
760 // art::x86_64::X86_64Assembler::AddImmediate below), but it
761 // has not been saved by the previous call to
762 // art::SlowPathCode::SaveLiveRegisters, as it is a
763 // callee-save register --
764 // art::SlowPathCode::SaveLiveRegisters does not consider
765 // callee-save registers, as it has been designed with the
766 // assumption that callee-save registers are supposed to be
767 // handled by the called function. So, as a callee-save
768 // register, `index_reg` _would_ eventually be saved onto
769 // the stack, but it would be too late: we would have
770 // changed its value earlier. Therefore, we manually save
771 // it here into another freely available register,
772 // `free_reg`, chosen of course among the caller-save
773 // registers (as a callee-save `free_reg` register would
774 // exhibit the same problem).
775 //
776 // Note we could have requested a temporary register from
777 // the register allocator instead; but we prefer not to, as
778 // this is a slow path, and we know we can find a
779 // caller-save register that is available.
780 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
781 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
782 index_reg = free_reg;
783 index = Location::RegisterLocation(index_reg);
784 } else {
785 // The initial register stored in `index_` has already been
786 // saved in the call to art::SlowPathCode::SaveLiveRegisters
787 // (as it is not a callee-save register), so we can freely
788 // use it.
789 }
790 // Shifting the index value contained in `index_reg` by the
791 // scale factor (2) cannot overflow in practice, as the
792 // runtime is unable to allocate object arrays with a size
793 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
794 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
795 static_assert(
796 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
797 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
798 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
799 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100800 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
801 // intrinsics, `index_` is not shifted by a scale factor of 2
802 // (as in the case of ArrayGet), as it is actually an offset
803 // to an object field within an object.
804 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000805 DCHECK(instruction_->GetLocations()->Intrinsified());
806 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
807 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
808 << instruction_->AsInvoke()->GetIntrinsic();
809 DCHECK_EQ(offset_, 0U);
810 DCHECK(index_.IsRegister());
811 }
812 }
813
814 // We're moving two or three locations to locations that could
815 // overlap, so we need a parallel move resolver.
816 InvokeRuntimeCallingConvention calling_convention;
817 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
818 parallel_move.AddMove(ref_,
819 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
820 Primitive::kPrimNot,
821 nullptr);
822 parallel_move.AddMove(obj_,
823 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
824 Primitive::kPrimNot,
825 nullptr);
826 if (index.IsValid()) {
827 parallel_move.AddMove(index,
828 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
829 Primitive::kPrimInt,
830 nullptr);
831 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
832 } else {
833 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
834 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
835 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100836 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000837 instruction_,
838 instruction_->GetDexPc(),
839 this);
840 CheckEntrypointTypes<
841 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
842 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
843
844 RestoreLiveRegisters(codegen, locations);
845 __ jmp(GetExitLabel());
846 }
847
848 const char* GetDescription() const OVERRIDE {
849 return "ReadBarrierForHeapReferenceSlowPathX86_64";
850 }
851
852 private:
853 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
854 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
855 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
856 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
857 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
858 return static_cast<CpuRegister>(i);
859 }
860 }
861 // We shall never fail to find a free caller-save register, as
862 // there are more than two core caller-save registers on x86-64
863 // (meaning it is possible to find one which is different from
864 // `ref` and `obj`).
865 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
866 LOG(FATAL) << "Could not find a free caller-save register";
867 UNREACHABLE();
868 }
869
Roland Levillain0d5a2812015-11-13 10:07:31 +0000870 const Location out_;
871 const Location ref_;
872 const Location obj_;
873 const uint32_t offset_;
874 // An additional location containing an index to an array.
875 // Only used for HArrayGet and the UnsafeGetObject &
876 // UnsafeGetObjectVolatile intrinsics.
877 const Location index_;
878
879 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
880};
881
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `root` is the location of the GC root to pass to the runtime;
  // `out` receives the (possibly marked) reference.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    // This slow path only makes sense when read barriers are compiled in.
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // GC-root read barriers are only emitted for class and string loads.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Move the root to the first argument register of the runtime
    // calling convention, then call the runtime entrypoint.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // The runtime returns the result in RAX; move it to `out_`.
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location receiving the read barrier's result.
  const Location out_;
  // The location of the GC root handled by this slow path.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
923
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100924#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100925// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
926#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100927
Roland Levillain4fa13f62015-07-06 18:11:54 +0100928inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700929 switch (cond) {
930 case kCondEQ: return kEqual;
931 case kCondNE: return kNotEqual;
932 case kCondLT: return kLess;
933 case kCondLE: return kLessEqual;
934 case kCondGT: return kGreater;
935 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700936 case kCondB: return kBelow;
937 case kCondBE: return kBelowEqual;
938 case kCondA: return kAbove;
939 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700940 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100941 LOG(FATAL) << "Unreachable";
942 UNREACHABLE();
943}
944
Aart Bike9f37602015-10-09 11:15:55 -0700945// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100946inline Condition X86_64FPCondition(IfCondition cond) {
947 switch (cond) {
948 case kCondEQ: return kEqual;
949 case kCondNE: return kNotEqual;
950 case kCondLT: return kBelow;
951 case kCondLE: return kBelowEqual;
952 case kCondGT: return kAbove;
953 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700954 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100955 };
956 LOG(FATAL) << "Unreachable";
957 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700958}
959
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On x86-64 every desired dispatch kind is supported, so the
  // requested dispatch info is returned unchanged.
  return desired_dispatch_info;
}
965
// Materializes the callee ArtMethod* for a static/direct invoke and
// returns its location. For all method-load kinds except kRecursive,
// the callee ends up in `temp`.
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it as an immediate.
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Load from the dex cache array via a PC-relative address whose
      // 32-bit displacement is fixed up by the linker (kDummy32BitOffset
      // is a placeholder).
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movq" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      // Resolve the callee through the current method's dex cache.
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invoke: reload the current method from the stack.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
1017
// Emits the code for a static or direct call: loads the callee method
// and then calls it according to the invoke's code-pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self call: branch to this method's own frame entry label.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  // Emitting a call makes this method a non-leaf.
  DCHECK(!IsLeafMethod());
}
1037
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod* from the embedded vtable, and calls its entrypoint.
// `temp_in` is a scratch register clobbered by the sequence.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  // Byte offset of the vtable slot for this invoke within the class object.
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The class load above doubles as the implicit null check of the receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
1068
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001069void CodeGeneratorX86_64::RecordSimplePatch() {
1070 if (GetCompilerOptions().GetIncludePatchInformation()) {
1071 simple_patches_.emplace_back();
1072 __ Bind(&simple_patches_.back());
1073 }
1074}
1075
Vladimir Markoaad75c62016-10-03 08:46:48 +00001076void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
1077 DCHECK(GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001078 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001079 __ Bind(&string_patches_.back().label);
1080}
1081
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001082void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08001083 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001084 __ Bind(&type_patches_.back().label);
1085}
1086
Vladimir Markoaad75c62016-10-03 08:46:48 +00001087Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1088 DCHECK(!GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001089 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001090 return &string_patches_.back().label;
1091}
1092
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001093Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1094 uint32_t element_offset) {
1095 // Add a patch entry and return the label.
1096 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1097 return &pc_relative_dex_cache_patches_.back().label;
1098}
1099
Vladimir Markoaad75c62016-10-03 08:46:48 +00001100// The label points to the end of the "movl" or another instruction but the literal offset
1101// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
1102constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1103
1104template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1105inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1106 const ArenaDeque<PatchInfo<Label>>& infos,
1107 ArenaVector<LinkerPatch>* linker_patches) {
1108 for (const PatchInfo<Label>& info : infos) {
1109 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1110 linker_patches->push_back(
1111 Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
1112 }
1113}
1114
Vladimir Marko58155012015-08-19 12:49:41 +00001115void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
1116 DCHECK(linker_patches->empty());
1117 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001118 pc_relative_dex_cache_patches_.size() +
1119 simple_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001120 string_patches_.size() +
1121 type_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00001122 linker_patches->reserve(size);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001123 EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
1124 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001125 for (const Label& label : simple_patches_) {
1126 uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1127 linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
1128 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00001129 if (!GetCompilerOptions().IsBootImage()) {
1130 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
1131 } else {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001132 // These are always PC-relative, see GetSupportedLoadStringKind().
Vladimir Markoaad75c62016-10-03 08:46:48 +00001133 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001134 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00001135 // These are always PC-relative, see GetSupportedLoadClassKind().
1136 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
Vladimir Marko58155012015-08-19 12:49:41 +00001137}
1138
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001139void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001140 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141}
1142
1143void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001144 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001145}
1146
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001147size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1148 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1149 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001150}
1151
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001152size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1153 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1154 return kX86_64WordSize;
1155}
1156
1157size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1158 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1159 return kX86_64WordSize;
1160}
1161
1162size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1163 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1164 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001165}
1166
Calin Juravle175dc732015-08-25 15:42:32 +01001167void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1168 HInstruction* instruction,
1169 uint32_t dex_pc,
1170 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001171 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001172 GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
1173 if (EntrypointRequiresStackMap(entrypoint)) {
1174 RecordPcInfo(instruction, dex_pc, slow_path);
1175 }
Alexandre Rames8158f282015-08-07 10:26:17 +01001176}
1177
Roland Levillaindec8f632016-07-22 17:10:06 +01001178void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1179 HInstruction* instruction,
1180 SlowPathCode* slow_path) {
1181 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescuba45db02016-07-12 22:53:02 +01001182 GenerateInvokeRuntime(entry_point_offset);
1183}
1184
1185void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001186 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
1187}
1188
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001189static constexpr int kNumberOfCpuRegisterPairs = 0;
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001190// Use a fake return address register to mimic Quick.
1191static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
Mark Mendellfb8d2792015-03-31 22:16:59 -04001192CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001193 const X86_64InstructionSetFeatures& isa_features,
1194 const CompilerOptions& compiler_options,
1195 OptimizingCompilerStats* stats)
Nicolas Geoffray98893962015-01-21 12:32:32 +00001196 : CodeGenerator(graph,
1197 kNumberOfCpuRegisters,
1198 kNumberOfFloatRegisters,
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001199 kNumberOfCpuRegisterPairs,
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001200 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1201 arraysize(kCoreCalleeSaves))
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001202 | (1 << kFakeReturnRegister),
Nicolas Geoffray4dee6362015-01-23 18:23:14 +00001203 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1204 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001205 compiler_options,
1206 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +01001207 block_labels_(nullptr),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001208 location_builder_(graph, this),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001209 instruction_visitor_(graph, this),
Mark Mendellfb8d2792015-03-31 22:16:59 -04001210 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001211 assembler_(graph->GetArena()),
Mark Mendellf55c3e02015-03-26 21:07:46 -04001212 isa_features_(isa_features),
Vladimir Marko58155012015-08-19 12:49:41 +00001213 constant_area_start_(0),
Vladimir Marko0f7dca42015-11-02 14:36:43 +00001214 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001215 simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1216 string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001217 type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001218 fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001219 jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1220 jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001221 AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
1222}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001223
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01001224InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
1225 CodeGeneratorX86_64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001226 : InstructionCodeGenerator(graph, codegen),
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001227 assembler_(codegen->GetAssembler()),
1228 codegen_(codegen) {}
1229
David Brazdil58282f42016-01-14 12:45:10 +00001230void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001231 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001232 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001233
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001234 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001235 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001236}
1237
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001238static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001239 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001240}
David Srbecky9d8606d2015-04-12 09:35:32 +01001241
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001242static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001243 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001244}
1245
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001246void CodeGeneratorX86_64::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001247 __ cfi().SetCurrentCFAOffset(kX86_64WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001248 __ Bind(&frame_entry_label_);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001249 bool skip_overflow_check = IsLeafMethod()
Dave Allison648d7112014-07-25 16:15:27 -07001250 && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001251 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001252
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001253 if (!skip_overflow_check) {
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001254 __ testq(CpuRegister(RAX), Address(
1255 CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001256 RecordPcInfo(nullptr, 0);
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001257 }
Nicolas Geoffraya26369a2015-01-22 08:46:05 +00001258
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001259 if (HasEmptyFrame()) {
1260 return;
1261 }
1262
Nicolas Geoffray98893962015-01-21 12:32:32 +00001263 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001264 Register reg = kCoreCalleeSaves[i];
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +00001265 if (allocated_registers_.ContainsCoreRegister(reg)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001266 __ pushq(CpuRegister(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001267 __ cfi().AdjustCFAOffset(kX86_64WordSize);
1268 __ cfi().RelOffset(DWARFReg(reg), 0);
Nicolas Geoffray98893962015-01-21 12:32:32 +00001269 }
1270 }
Nicolas Geoffrayf6e206c2014-08-07 20:25:41 +01001271
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001272 int adjust = GetFrameSize() - GetCoreSpillSize();
1273 __ subq(CpuRegister(RSP), Immediate(adjust));
1274 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001275 uint32_t xmm_spill_location = GetFpuSpillStart();
1276 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001277
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001278 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
1279 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001280 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1281 __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
1282 __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001283 }
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01001284 }
1285
Mingyao Yang063fc772016-08-02 11:02:54 -07001286 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1287 // Initialize should_deoptimize flag to 0.
1288 __ movl(Address(CpuRegister(RSP), xmm_spill_location - kShouldDeoptimizeFlagSize),
1289 Immediate(0));
1290 }
1291
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001292 // Save the current method if we need it. Note that we do not
1293 // do this in HCurrentMethod, as the instruction might have been removed
1294 // in the SSA graph.
1295 if (RequiresCurrentMethod()) {
1296 __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
1297 CpuRegister(kMethodRegisterArgument));
1298 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001299}
1300
1301void CodeGeneratorX86_64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001302 __ cfi().RememberState();
1303 if (!HasEmptyFrame()) {
1304 uint32_t xmm_spill_location = GetFpuSpillStart();
1305 size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
1306 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1307 if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
1308 int offset = xmm_spill_location + (xmm_spill_slot_size * i);
1309 __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
1310 __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
1311 }
1312 }
1313
1314 int adjust = GetFrameSize() - GetCoreSpillSize();
1315 __ addq(CpuRegister(RSP), Immediate(adjust));
1316 __ cfi().AdjustCFAOffset(-adjust);
1317
1318 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1319 Register reg = kCoreCalleeSaves[i];
1320 if (allocated_registers_.ContainsCoreRegister(reg)) {
1321 __ popq(CpuRegister(reg));
1322 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
1323 __ cfi().Restore(DWARFReg(reg));
1324 }
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001325 }
1326 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001327 __ ret();
1328 __ cfi().RestoreState();
1329 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001330}
1331
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001332void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1333 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001334}
1335
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001336void CodeGeneratorX86_64::Move(Location destination, Location source) {
1337 if (source.Equals(destination)) {
1338 return;
1339 }
1340 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001341 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001342 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001343 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001344 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001345 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001346 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001347 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1348 } else if (source.IsConstant()) {
1349 HConstant* constant = source.GetConstant();
1350 if (constant->IsLongConstant()) {
1351 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1352 } else {
1353 Load32BitValue(dest, GetInt32ValueOf(constant));
1354 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001355 } else {
1356 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001357 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001358 }
1359 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001360 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001361 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001362 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001363 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001364 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1365 } else if (source.IsConstant()) {
1366 HConstant* constant = source.GetConstant();
1367 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1368 if (constant->IsFloatConstant()) {
1369 Load32BitValue(dest, static_cast<int32_t>(value));
1370 } else {
1371 Load64BitValue(dest, value);
1372 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001373 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001374 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001375 } else {
1376 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001377 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001378 }
1379 } else if (destination.IsStackSlot()) {
1380 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001381 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001382 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001383 } else if (source.IsFpuRegister()) {
1384 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001385 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001386 } else if (source.IsConstant()) {
1387 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001388 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001389 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001390 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001391 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001392 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1393 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001394 }
1395 } else {
1396 DCHECK(destination.IsDoubleStackSlot());
1397 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001398 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001399 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001400 } else if (source.IsFpuRegister()) {
1401 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001402 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001403 } else if (source.IsConstant()) {
1404 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001405 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1406 int64_t value = GetInt64ValueOf(constant);
Mark Mendellcfa410b2015-05-25 16:02:44 -04001407 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001408 } else {
1409 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001410 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1411 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001412 }
1413 }
1414}
1415
Calin Juravle175dc732015-08-25 15:42:32 +01001416void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1417 DCHECK(location.IsRegister());
1418 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1419}
1420
Calin Juravlee460d1d2015-09-29 04:52:17 +01001421void CodeGeneratorX86_64::MoveLocation(
1422 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1423 Move(dst, src);
1424}
1425
1426void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1427 if (location.IsRegister()) {
1428 locations->AddTemp(location);
1429 } else {
1430 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1431 }
1432}
1433
David Brazdilfc6a86a2015-06-26 10:33:45 +00001434void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001435 DCHECK(!successor->IsExitBlock());
1436
1437 HBasicBlock* block = got->GetBlock();
1438 HInstruction* previous = got->GetPrevious();
1439
1440 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001441 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001442 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1443 return;
1444 }
1445
1446 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1447 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1448 }
1449 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001450 __ jmp(codegen_->GetLabelOf(successor));
1451 }
1452}
1453
David Brazdilfc6a86a2015-06-26 10:33:45 +00001454void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1455 got->SetLocations(nullptr);
1456}
1457
1458void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1459 HandleGoto(got, got->GetSuccessor());
1460}
1461
1462void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1463 try_boundary->SetLocations(nullptr);
1464}
1465
1466void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1467 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1468 if (!successor->IsExitBlock()) {
1469 HandleGoto(try_boundary, successor);
1470 }
1471}
1472
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001473void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1474 exit->SetLocations(nullptr);
1475}
1476
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001477void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001478}
1479
Mark Mendell152408f2015-12-31 12:28:50 -05001480template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001481void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001482 LabelType* true_label,
1483 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001484 if (cond->IsFPConditionTrueIfNaN()) {
1485 __ j(kUnordered, true_label);
1486 } else if (cond->IsFPConditionFalseIfNaN()) {
1487 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001488 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001489 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001490}
1491
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001492void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001493 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001494
Mark Mendellc4701932015-04-10 13:18:51 -04001495 Location left = locations->InAt(0);
1496 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001497 Primitive::Type type = condition->InputAt(0)->GetType();
1498 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001499 case Primitive::kPrimBoolean:
1500 case Primitive::kPrimByte:
1501 case Primitive::kPrimChar:
1502 case Primitive::kPrimShort:
1503 case Primitive::kPrimInt:
1504 case Primitive::kPrimNot: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001505 codegen_->GenerateIntCompare(left, right);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001506 break;
1507 }
Mark Mendellc4701932015-04-10 13:18:51 -04001508 case Primitive::kPrimLong: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001509 codegen_->GenerateLongCompare(left, right);
Mark Mendellc4701932015-04-10 13:18:51 -04001510 break;
1511 }
1512 case Primitive::kPrimFloat: {
1513 if (right.IsFpuRegister()) {
1514 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1515 } else if (right.IsConstant()) {
1516 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1517 codegen_->LiteralFloatAddress(
1518 right.GetConstant()->AsFloatConstant()->GetValue()));
1519 } else {
1520 DCHECK(right.IsStackSlot());
1521 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1522 Address(CpuRegister(RSP), right.GetStackIndex()));
1523 }
Mark Mendellc4701932015-04-10 13:18:51 -04001524 break;
1525 }
1526 case Primitive::kPrimDouble: {
1527 if (right.IsFpuRegister()) {
1528 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1529 } else if (right.IsConstant()) {
1530 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1531 codegen_->LiteralDoubleAddress(
1532 right.GetConstant()->AsDoubleConstant()->GetValue()));
1533 } else {
1534 DCHECK(right.IsDoubleStackSlot());
1535 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1536 Address(CpuRegister(RSP), right.GetStackIndex()));
1537 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001538 break;
1539 }
1540 default:
1541 LOG(FATAL) << "Unexpected condition type " << type;
1542 }
1543}
1544
1545template<class LabelType>
1546void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1547 LabelType* true_target_in,
1548 LabelType* false_target_in) {
1549 // Generated branching requires both targets to be explicit. If either of the
1550 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1551 LabelType fallthrough_target;
1552 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1553 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1554
1555 // Generate the comparison to set the CC.
1556 GenerateCompareTest(condition);
1557
1558 // Now generate the correct jump(s).
1559 Primitive::Type type = condition->InputAt(0)->GetType();
1560 switch (type) {
1561 case Primitive::kPrimLong: {
1562 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1563 break;
1564 }
1565 case Primitive::kPrimFloat: {
1566 GenerateFPJumps(condition, true_target, false_target);
1567 break;
1568 }
1569 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001570 GenerateFPJumps(condition, true_target, false_target);
1571 break;
1572 }
1573 default:
1574 LOG(FATAL) << "Unexpected condition type " << type;
1575 }
1576
David Brazdil0debae72015-11-12 18:37:00 +00001577 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001578 __ jmp(false_target);
1579 }
David Brazdil0debae72015-11-12 18:37:00 +00001580
1581 if (fallthrough_target.IsLinked()) {
1582 __ Bind(&fallthrough_target);
1583 }
Mark Mendellc4701932015-04-10 13:18:51 -04001584}
1585
David Brazdil0debae72015-11-12 18:37:00 +00001586static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1587 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1588 // are set only strictly before `branch`. We can't use the eflags on long
1589 // conditions if they are materialized due to the complex branching.
1590 return cond->IsCondition() &&
1591 cond->GetNext() == branch &&
1592 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1593}
1594
Mark Mendell152408f2015-12-31 12:28:50 -05001595template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001596void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001597 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001598 LabelType* true_target,
1599 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001600 HInstruction* cond = instruction->InputAt(condition_input_index);
1601
1602 if (true_target == nullptr && false_target == nullptr) {
1603 // Nothing to do. The code always falls through.
1604 return;
1605 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001606 // Constant condition, statically compared against "true" (integer value 1).
1607 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001608 if (true_target != nullptr) {
1609 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001610 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001611 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001612 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001613 if (false_target != nullptr) {
1614 __ jmp(false_target);
1615 }
1616 }
1617 return;
1618 }
1619
1620 // The following code generates these patterns:
1621 // (1) true_target == nullptr && false_target != nullptr
1622 // - opposite condition true => branch to false_target
1623 // (2) true_target != nullptr && false_target == nullptr
1624 // - condition true => branch to true_target
1625 // (3) true_target != nullptr && false_target != nullptr
1626 // - condition true => branch to true_target
1627 // - branch to false_target
1628 if (IsBooleanValueOrMaterializedCondition(cond)) {
1629 if (AreEflagsSetFrom(cond, instruction)) {
1630 if (true_target == nullptr) {
1631 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1632 } else {
1633 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1634 }
1635 } else {
1636 // Materialized condition, compare against 0.
1637 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1638 if (lhs.IsRegister()) {
1639 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1640 } else {
1641 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1642 }
1643 if (true_target == nullptr) {
1644 __ j(kEqual, false_target);
1645 } else {
1646 __ j(kNotEqual, true_target);
1647 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001648 }
1649 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001650 // Condition has not been materialized, use its inputs as the
1651 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001652 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001653
David Brazdil0debae72015-11-12 18:37:00 +00001654 // If this is a long or FP comparison that has been folded into
1655 // the HCondition, generate the comparison directly.
1656 Primitive::Type type = condition->InputAt(0)->GetType();
1657 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1658 GenerateCompareTestAndBranch(condition, true_target, false_target);
1659 return;
1660 }
1661
1662 Location lhs = condition->GetLocations()->InAt(0);
1663 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001664 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001665 if (true_target == nullptr) {
1666 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1667 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001668 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001669 }
Dave Allison20dfc792014-06-16 20:44:29 -07001670 }
David Brazdil0debae72015-11-12 18:37:00 +00001671
1672 // If neither branch falls through (case 3), the conditional branch to `true_target`
1673 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1674 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001675 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001676 }
1677}
1678
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001679void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001680 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1681 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001682 locations->SetInAt(0, Location::Any());
1683 }
1684}
1685
1686void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001687 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1688 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1689 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1690 nullptr : codegen_->GetLabelOf(true_successor);
1691 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1692 nullptr : codegen_->GetLabelOf(false_successor);
1693 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001694}
1695
1696void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1697 LocationSummary* locations = new (GetGraph()->GetArena())
1698 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001699 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001700 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001701 locations->SetInAt(0, Location::Any());
1702 }
1703}
1704
1705void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001706 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001707 GenerateTestAndBranch<Label>(deoptimize,
1708 /* condition_input_index */ 0,
1709 slow_path->GetEntryLabel(),
1710 /* false_target */ nullptr);
1711}
1712
// The "should deoptimize" flag is loaded into a register; no runtime call.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
1718
// Loads the "should deoptimize" flag from its slot in the current stack frame.
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1723
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001724static bool SelectCanUseCMOV(HSelect* select) {
1725 // There are no conditional move instructions for XMMs.
1726 if (Primitive::IsFloatingPointType(select->GetType())) {
1727 return false;
1728 }
1729
1730 // A FP condition doesn't generate the single CC that we need.
1731 HInstruction* condition = select->GetCondition();
1732 if (condition->IsCondition() &&
1733 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1734 return false;
1735 }
1736
1737 // We can generate a CMOV for this Select.
1738 return true;
1739}
1740
David Brazdil74eb1b22015-12-14 11:44:01 +00001741void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1742 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1743 if (Primitive::IsFloatingPointType(select->GetType())) {
1744 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001745 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001746 } else {
1747 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001748 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001749 if (select->InputAt(1)->IsConstant()) {
1750 locations->SetInAt(1, Location::RequiresRegister());
1751 } else {
1752 locations->SetInAt(1, Location::Any());
1753 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001754 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001755 locations->SetInAt(1, Location::Any());
1756 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001757 }
1758 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1759 locations->SetInAt(2, Location::RequiresRegister());
1760 }
1761 locations->SetOut(Location::SameAsFirstInput());
1762}
1763
// Generates code for HSelect: a single CMOV when SelectCanUseCMOV allows it,
// otherwise an explicit conditional branch around a move. The output register
// starts out holding the "false" value (input 0) and is conditionally
// overwritten with the "true" value (input 1).
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: compute the flags now.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // "True" value lives on the stack; CMOV can take a memory source.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the "true" value when the condition
    // is false (output already holds the "false" value via SameAsFirstInput).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1820
// HNativeDebugInfo needs no operands; an empty LocationSummary is attached.
// NOTE(review): the arena-allocated summary appears to register itself with
// the instruction on construction — confirm against LocationSummary's ctor.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}
1824
// No code is emitted for HNativeDebugInfo itself.
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1828
// Emits a single x86 NOP instruction.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1832
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001833void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001834 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001835 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001836 // Handle the long/FP comparisons made in instruction simplification.
1837 switch (cond->InputAt(0)->GetType()) {
1838 case Primitive::kPrimLong:
1839 locations->SetInAt(0, Location::RequiresRegister());
1840 locations->SetInAt(1, Location::Any());
1841 break;
1842 case Primitive::kPrimFloat:
1843 case Primitive::kPrimDouble:
1844 locations->SetInAt(0, Location::RequiresFpuRegister());
1845 locations->SetInAt(1, Location::Any());
1846 break;
1847 default:
1848 locations->SetInAt(0, Location::RequiresRegister());
1849 locations->SetInAt(1, Location::Any());
1850 break;
1851 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001852 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001853 locations->SetOut(Location::RequiresRegister());
1854 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001855}
1856
// Materializes a condition as 0/1 in the output register. Nothing is emitted
// when the condition is generated at its use site instead. Integer/long
// comparisons use setcc directly; FP comparisons go through ucomis{s,d} and
// GenerateFPJumps, then convert the two jump targets into 0/1.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      // rhs may be a constant (use a literal pool address), a stack slot, or
      // an XMM register.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1926
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
1930
// All comparison visitors delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
1934
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
1938
// All comparison visitors delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
1942
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
1946
// All comparison visitors delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
1950
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
1954
// All comparison visitors delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
1958
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
1962
// All comparison visitors delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
1966
// All comparison visitors delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
1970
// All comparison visitors delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
1974
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
1978
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
1982
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
1986
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
1990
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
1994
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
1998
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2002
// Unsigned comparison visitors also delegate to the shared HandleCondition.
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2006
// Builds locations for HCompare (the -1/0/+1 three-way compare): integral
// inputs take a core register + any; FP inputs take an XMM register + any.
// The integral case marks the output as non-overlapping with the inputs.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2033
// Generates the -1/0/+1 result for HCompare. After the type-specific compare
// sets EFLAGS, the tail converts the flags into the result: 0 on equal,
// -1 on "less" (kLess for integers, kBelow for FP since ucomis sets CF),
// +1 otherwise. NaN operands jump straight to &greater or &less per the
// compare's gt/lt bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      // right may be a constant (literal pool), a stack slot, or an XMM reg.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // movl does not touch EFLAGS, so the flags from the compare are still live.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2102
// Constants get a ConstantLocation; no register is reserved.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2108
// No code is emitted for the constant itself.
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2112
// Constants get a ConstantLocation; no register is reserved.
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2118
// No code is emitted for the constant itself.
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2122
// Constants get a ConstantLocation; no register is reserved.
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2128
// No code is emitted for the constant itself.
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2132
// Constants get a ConstantLocation; no register is reserved.
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2138
// No code is emitted for the constant itself.
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2142
// Constants get a ConstantLocation; no register is reserved.
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2148
// No code is emitted for the constant itself.
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2153
// A memory barrier has no operands or outputs.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
2157
// Emits the barrier matching the instruction's barrier kind.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2161
// A void return has no operands or outputs.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
2165
// Tears down the frame and returns.
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2169
// Pins the return value to the ABI return register: RAX for integral and
// reference types, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2193
// Emits the frame exit. In debug builds, first verifies that the register
// allocator honored the ABI return-register constraint (RAX / XMM0).
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2219
// Maps a return type to its dex calling-convention location: RAX for
// integral/reference types, XMM0 for FP, no location for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above covers every enumerator and returns.
  UNREACHABLE();
}
2241
// The callee's ArtMethod* is passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2245
// Returns the location of the next argument of the given type, advancing the
// visitor's running counters: gp_index_ for core registers, float_index_ for
// XMM registers, and stack_index_ (in word slots; 64-bit types consume two)
// for the spill position once registers run out.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      // A long consumes one core register but two stack slots; gp_index_ is
      // bumped by 2 on the spill path so register assignment stops for good.
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2301
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2308
// Emits a runtime call to resolve and invoke the target method.
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2312
// Builds locations for a static/direct invoke; intrinsified invokes get
// their locations from the intrinsic builder instead of HandleInvoke.
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2325
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002326static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2327 if (invoke->GetLocations()->Intrinsified()) {
2328 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2329 intrinsic.Dispatch(invoke);
2330 return true;
2331 }
2332 return false;
2333}
2334
// Generates a static/direct call, unless an intrinsic implementation was
// selected, and records the PC for stack maps.
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2349
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002350void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002351 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002352 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002353}
2354
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002355void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002356 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002357 if (intrinsic.TryDispatch(invoke)) {
2358 return;
2359 }
2360
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002361 HandleInvoke(invoke);
2362}
2363
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002364void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002365 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2366 return;
2367 }
2368
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002369 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002370 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002371 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002372}
2373
// Builds the location summary for an interface invoke. On top of the generic
// invoke locations, interface dispatch needs RAX as the "hidden argument"
// register (see the code generator visitor, which loads the method index
// into it before the call).
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2379
// Generates code for an interface invoke: loads the receiver's class, walks
// into its IMT (interface method table), indexes the slot for this interface
// method, and calls through the entry point stored in the resolved ArtMethod.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. This is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the implicit null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the byte offset of this interface method's slot in the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2425
// Builds the location summary for an arithmetic negation. Integer negation
// is done in place (`neg` on the input register); floating-point negation
// additionally needs an FPU temp to hold the sign-bit XOR mask.
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      // Temp register for the sign-bit mask used by the XOR-based negation.
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2447
// Generates code for an arithmetic negation. Integer types use the x86
// `neg` instruction in place; floating-point types XOR the value with a
// mask that flips only the sign bit (IEEE-754 negation).
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2491
// Builds the location summary for a primitive type conversion. The outer
// switch is on the result type, the inner switch on the input type; several
// cases deliberately fall through because they share register constraints.
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
2689
2690void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2691 LocationSummary* locations = conversion->GetLocations();
2692 Location out = locations->Out();
2693 Location in = locations->InAt(0);
2694 Primitive::Type result_type = conversion->GetResultType();
2695 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002696 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002697 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002698 case Primitive::kPrimByte:
2699 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002700 case Primitive::kPrimLong:
2701 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002702 case Primitive::kPrimBoolean:
2703 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002704 case Primitive::kPrimShort:
2705 case Primitive::kPrimInt:
2706 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002707 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002708 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002709 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002710 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002711 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002712 Address(CpuRegister(RSP), in.GetStackIndex()));
2713 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002714 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002715 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002716 }
2717 break;
2718
2719 default:
2720 LOG(FATAL) << "Unexpected type conversion from " << input_type
2721 << " to " << result_type;
2722 }
2723 break;
2724
Roland Levillain01a8d712014-11-14 16:27:39 +00002725 case Primitive::kPrimShort:
2726 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002727 case Primitive::kPrimLong:
2728 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002729 case Primitive::kPrimBoolean:
2730 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002731 case Primitive::kPrimByte:
2732 case Primitive::kPrimInt:
2733 case Primitive::kPrimChar:
2734 // Processing a Dex `int-to-short' instruction.
2735 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002736 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002737 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002738 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002739 Address(CpuRegister(RSP), in.GetStackIndex()));
2740 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002741 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002742 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002743 }
2744 break;
2745
2746 default:
2747 LOG(FATAL) << "Unexpected type conversion from " << input_type
2748 << " to " << result_type;
2749 }
2750 break;
2751
Roland Levillain946e1432014-11-11 17:35:19 +00002752 case Primitive::kPrimInt:
2753 switch (input_type) {
2754 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002755 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002756 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002757 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002758 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002759 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002760 Address(CpuRegister(RSP), in.GetStackIndex()));
2761 } else {
2762 DCHECK(in.IsConstant());
2763 DCHECK(in.GetConstant()->IsLongConstant());
2764 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002765 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002766 }
2767 break;
2768
Roland Levillain3f8f9362014-12-02 17:45:01 +00002769 case Primitive::kPrimFloat: {
2770 // Processing a Dex `float-to-int' instruction.
2771 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2772 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002773 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002774
2775 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002776 // if input >= (float)INT_MAX goto done
2777 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002778 __ j(kAboveEqual, &done);
2779 // if input == NaN goto nan
2780 __ j(kUnordered, &nan);
2781 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002782 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002783 __ jmp(&done);
2784 __ Bind(&nan);
2785 // output = 0
2786 __ xorl(output, output);
2787 __ Bind(&done);
2788 break;
2789 }
2790
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002791 case Primitive::kPrimDouble: {
2792 // Processing a Dex `double-to-int' instruction.
2793 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2794 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002795 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002796
2797 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002798 // if input >= (double)INT_MAX goto done
2799 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002800 __ j(kAboveEqual, &done);
2801 // if input == NaN goto nan
2802 __ j(kUnordered, &nan);
2803 // output = double-to-int-truncate(input)
2804 __ cvttsd2si(output, input);
2805 __ jmp(&done);
2806 __ Bind(&nan);
2807 // output = 0
2808 __ xorl(output, output);
2809 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002810 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002811 }
Roland Levillain946e1432014-11-11 17:35:19 +00002812
2813 default:
2814 LOG(FATAL) << "Unexpected type conversion from " << input_type
2815 << " to " << result_type;
2816 }
2817 break;
2818
Roland Levillaindff1f282014-11-05 14:15:05 +00002819 case Primitive::kPrimLong:
2820 switch (input_type) {
2821 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002822 case Primitive::kPrimBoolean:
2823 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002824 case Primitive::kPrimByte:
2825 case Primitive::kPrimShort:
2826 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002827 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002828 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002829 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002830 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002831 break;
2832
Roland Levillain624279f2014-12-04 11:54:28 +00002833 case Primitive::kPrimFloat: {
2834 // Processing a Dex `float-to-long' instruction.
2835 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2836 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002837 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002838
Mark Mendell92e83bf2015-05-07 11:25:03 -04002839 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002840 // if input >= (float)LONG_MAX goto done
2841 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002842 __ j(kAboveEqual, &done);
2843 // if input == NaN goto nan
2844 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002845 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002846 __ cvttss2si(output, input, true);
2847 __ jmp(&done);
2848 __ Bind(&nan);
2849 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002850 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002851 __ Bind(&done);
2852 break;
2853 }
2854
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002855 case Primitive::kPrimDouble: {
2856 // Processing a Dex `double-to-long' instruction.
2857 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2858 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002859 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002860
Mark Mendell92e83bf2015-05-07 11:25:03 -04002861 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002862 // if input >= (double)LONG_MAX goto done
2863 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002864 __ j(kAboveEqual, &done);
2865 // if input == NaN goto nan
2866 __ j(kUnordered, &nan);
2867 // output = double-to-long-truncate(input)
2868 __ cvttsd2si(output, input, true);
2869 __ jmp(&done);
2870 __ Bind(&nan);
2871 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002872 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002873 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002874 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002875 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002876
2877 default:
2878 LOG(FATAL) << "Unexpected type conversion from " << input_type
2879 << " to " << result_type;
2880 }
2881 break;
2882
Roland Levillain981e4542014-11-14 11:47:14 +00002883 case Primitive::kPrimChar:
2884 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002885 case Primitive::kPrimLong:
2886 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002887 case Primitive::kPrimBoolean:
2888 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002889 case Primitive::kPrimByte:
2890 case Primitive::kPrimShort:
2891 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002892 // Processing a Dex `int-to-char' instruction.
2893 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002894 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002895 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002896 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002897 Address(CpuRegister(RSP), in.GetStackIndex()));
2898 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002899 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002900 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002901 }
2902 break;
2903
2904 default:
2905 LOG(FATAL) << "Unexpected type conversion from " << input_type
2906 << " to " << result_type;
2907 }
2908 break;
2909
Roland Levillaindff1f282014-11-05 14:15:05 +00002910 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002911 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002912 case Primitive::kPrimBoolean:
2913 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002914 case Primitive::kPrimByte:
2915 case Primitive::kPrimShort:
2916 case Primitive::kPrimInt:
2917 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002918 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002919 if (in.IsRegister()) {
2920 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2921 } else if (in.IsConstant()) {
2922 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2923 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002924 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002925 } else {
2926 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2927 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2928 }
Roland Levillaincff13742014-11-17 14:32:17 +00002929 break;
2930
2931 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002932 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002933 if (in.IsRegister()) {
2934 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2935 } else if (in.IsConstant()) {
2936 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2937 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002938 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002939 } else {
2940 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2941 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2942 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002943 break;
2944
Roland Levillaincff13742014-11-17 14:32:17 +00002945 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002946 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002947 if (in.IsFpuRegister()) {
2948 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2949 } else if (in.IsConstant()) {
2950 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2951 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002952 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002953 } else {
2954 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2955 Address(CpuRegister(RSP), in.GetStackIndex()));
2956 }
Roland Levillaincff13742014-11-17 14:32:17 +00002957 break;
2958
2959 default:
2960 LOG(FATAL) << "Unexpected type conversion from " << input_type
2961 << " to " << result_type;
2962 };
2963 break;
2964
Roland Levillaindff1f282014-11-05 14:15:05 +00002965 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002966 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002967 case Primitive::kPrimBoolean:
2968 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002969 case Primitive::kPrimByte:
2970 case Primitive::kPrimShort:
2971 case Primitive::kPrimInt:
2972 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002973 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002974 if (in.IsRegister()) {
2975 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2976 } else if (in.IsConstant()) {
2977 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2978 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002979 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002980 } else {
2981 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2982 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2983 }
Roland Levillaincff13742014-11-17 14:32:17 +00002984 break;
2985
2986 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002987 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002988 if (in.IsRegister()) {
2989 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2990 } else if (in.IsConstant()) {
2991 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2992 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002993 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002994 } else {
2995 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2996 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2997 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002998 break;
2999
Roland Levillaincff13742014-11-17 14:32:17 +00003000 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003001 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003002 if (in.IsFpuRegister()) {
3003 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3004 } else if (in.IsConstant()) {
3005 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3006 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003007 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003008 } else {
3009 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3010 Address(CpuRegister(RSP), in.GetStackIndex()));
3011 }
Roland Levillaincff13742014-11-17 14:32:17 +00003012 break;
3013
3014 default:
3015 LOG(FATAL) << "Unexpected type conversion from " << input_type
3016 << " to " << result_type;
3017 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003018 break;
3019
3020 default:
3021 LOG(FATAL) << "Unexpected type conversion from " << input_type
3022 << " to " << result_type;
3023 }
3024}
3025
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003026void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003027 LocationSummary* locations =
3028 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003029 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003030 case Primitive::kPrimInt: {
3031 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003032 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3033 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003034 break;
3035 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003036
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003038 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003039 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003040 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003041 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003042 break;
3043 }
3044
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003045 case Primitive::kPrimDouble:
3046 case Primitive::kPrimFloat: {
3047 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003048 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003049 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003050 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003051 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003052
3053 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003054 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003055 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003056}
3057
// Emits code for HAdd. Integer adds pick between two-operand addl/addq (when
// the output aliases an input) and a non-destructive leal/leaq (when it does
// not). FP adds are destructive SSE operations on the first input.
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out == first: plain destructive add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out == second: addition commutes, add first into out.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out is a third register: lea computes first + second without
          // clobbering either input.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // Non-destructive register + immediate via lea's displacement.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: addl only supports the destructive form.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The locations builder guaranteed an int32-representable constant.
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Destructive SSE add; second may be a register, a constant-area
      // literal, or a stack slot.
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3149
3150void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003151 LocationSummary* locations =
3152 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003153 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003154 case Primitive::kPrimInt: {
3155 locations->SetInAt(0, Location::RequiresRegister());
3156 locations->SetInAt(1, Location::Any());
3157 locations->SetOut(Location::SameAsFirstInput());
3158 break;
3159 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003160 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003161 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003162 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003163 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003164 break;
3165 }
Calin Juravle11351682014-10-23 15:38:15 +01003166 case Primitive::kPrimFloat:
3167 case Primitive::kPrimDouble: {
3168 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003169 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003170 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003171 break;
Calin Juravle11351682014-10-23 15:38:15 +01003172 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003173 default:
Calin Juravle11351682014-10-23 15:38:15 +01003174 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003175 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003176}
3177
// Emits code for HSub. All forms are destructive: the first operand register
// is overwritten with the result (the locations builder set SameAsFirstInput).
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Second operand may be a register, an immediate, or a stack slot.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        // The locations builder only allows int32-representable constants here.
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // SSE scalar subtract; constant operands come from the constant area.
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3240
Calin Juravle34bacdf2014-10-07 20:23:36 +01003241void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3242 LocationSummary* locations =
3243 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3244 switch (mul->GetResultType()) {
3245 case Primitive::kPrimInt: {
3246 locations->SetInAt(0, Location::RequiresRegister());
3247 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003248 if (mul->InputAt(1)->IsIntConstant()) {
3249 // Can use 3 operand multiply.
3250 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3251 } else {
3252 locations->SetOut(Location::SameAsFirstInput());
3253 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003254 break;
3255 }
3256 case Primitive::kPrimLong: {
3257 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003258 locations->SetInAt(1, Location::Any());
3259 if (mul->InputAt(1)->IsLongConstant() &&
3260 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003261 // Can use 3 operand multiply.
3262 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3263 } else {
3264 locations->SetOut(Location::SameAsFirstInput());
3265 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003266 break;
3267 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003268 case Primitive::kPrimFloat:
3269 case Primitive::kPrimDouble: {
3270 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003271 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003272 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003273 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003274 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003275
3276 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003277 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003278 }
3279}
3280
// Emits code for HMul. Constant operands use the non-destructive
// three-operand imul form (chosen by the locations builder); otherwise the
// two-operand destructive form is used with the output aliasing input 0.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Fits the three-operand imulq immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Destructive SSE multiply; second may be a register, a constant-area
      // literal, or a stack slot.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3364
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003365void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3366 uint32_t stack_adjustment, bool is_float) {
3367 if (source.IsStackSlot()) {
3368 DCHECK(is_float);
3369 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3370 } else if (source.IsDoubleStackSlot()) {
3371 DCHECK(!is_float);
3372 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3373 } else {
3374 // Write the value to the temporary location on the stack and load to FP stack.
3375 if (is_float) {
3376 Location stack_temp = Location::StackSlot(temp_offset);
3377 codegen_->Move(stack_temp, source);
3378 __ flds(Address(CpuRegister(RSP), temp_offset));
3379 } else {
3380 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3381 codegen_->Move(stack_temp, source);
3382 __ fldl(Address(CpuRegister(RSP), temp_offset));
3383 }
3384 }
3385}
3386
// Emits code for a floating-point remainder (HRem on float/double) using the
// x87 fprem instruction: both operands are pushed onto the FP stack, fprem is
// run in a loop until the FPU's C2 flag reports the partial reduction is
// complete, and the result is stored back and reloaded into an XMM register.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3439
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003440void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3441 DCHECK(instruction->IsDiv() || instruction->IsRem());
3442
3443 LocationSummary* locations = instruction->GetLocations();
3444 Location second = locations->InAt(1);
3445 DCHECK(second.IsConstant());
3446
3447 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3448 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003449 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003450
3451 DCHECK(imm == 1 || imm == -1);
3452
3453 switch (instruction->GetResultType()) {
3454 case Primitive::kPrimInt: {
3455 if (instruction->IsRem()) {
3456 __ xorl(output_register, output_register);
3457 } else {
3458 __ movl(output_register, input_register);
3459 if (imm == -1) {
3460 __ negl(output_register);
3461 }
3462 }
3463 break;
3464 }
3465
3466 case Primitive::kPrimLong: {
3467 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003468 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003469 } else {
3470 __ movq(output_register, input_register);
3471 if (imm == -1) {
3472 __ negq(output_register);
3473 }
3474 }
3475 break;
3476 }
3477
3478 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003479 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003480 }
3481}
3482
// Emits code for HDiv by a (possibly negative) power-of-two constant.
// Arithmetic right shift alone rounds toward negative infinity; to get
// round-toward-zero semantics, (abs_imm - 1) is conditionally added to the
// numerator (via cmov) when it is negative, before shifting. A negative
// divisor additionally negates the result.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (abs_imm - 1), the bias needed for negative numerators.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Keep the unbiased numerator when it is non-negative.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // The bias may not fit a lea displacement, so materialize it first.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3526
// Emits the "magic number" sequence for integer div/rem by an arbitrary
// non-zero, non-power-of-two constant (Hacker's Delight style): multiply
// by a precomputed magic constant, correct, shift, then round toward zero.
// Register constraints (pinned in VisitDiv/VisitRem): dividend in RAX,
// imul result in RDX:RAX, output in RAX (div) or RDX (rem).
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // A temp holds the original numerator: imul clobbers RDX:RAX, and rem
  // needs the numerator again for the final subtraction.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  // Sanity-check the fixed-register contract set up by the locations builder.
  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator before clobbering EAX.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Sign-correction terms required when imm and magic have opposite signs.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (adds the extracted sign bit to round toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // rem = numerator - quotient * imm; result lands in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    // Aliases to make the 64-bit branch read naturally.
    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // imulq's immediate form only takes 32-bit immediates; otherwise the
      // constant is materialized in the literal pool.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3637
Calin Juravlebacfec32014-11-14 15:54:36 +00003638void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3639 DCHECK(instruction->IsDiv() || instruction->IsRem());
3640 Primitive::Type type = instruction->GetResultType();
3641 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3642
3643 bool is_div = instruction->IsDiv();
3644 LocationSummary* locations = instruction->GetLocations();
3645
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003646 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3647 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003648
Roland Levillain271ab9c2014-11-27 15:23:57 +00003649 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003650 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003651
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003652 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003653 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003654
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003655 if (imm == 0) {
3656 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3657 } else if (imm == 1 || imm == -1) {
3658 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003659 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003660 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003661 } else {
3662 DCHECK(imm <= -2 || imm >= 2);
3663 GenerateDivRemWithAnyConstant(instruction);
3664 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003665 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003666 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003667 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003668 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003669 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003670
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003671 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3672 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3673 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3674 // so it's safe to just use negl instead of more complex comparisons.
3675 if (type == Primitive::kPrimInt) {
3676 __ cmpl(second_reg, Immediate(-1));
3677 __ j(kEqual, slow_path->GetEntryLabel());
3678 // edx:eax <- sign-extended of eax
3679 __ cdq();
3680 // eax = quotient, edx = remainder
3681 __ idivl(second_reg);
3682 } else {
3683 __ cmpq(second_reg, Immediate(-1));
3684 __ j(kEqual, slow_path->GetEntryLabel());
3685 // rdx:rax <- sign-extended of rax
3686 __ cqo();
3687 // rax = quotient, rdx = remainder
3688 __ idivq(second_reg);
3689 }
3690 __ Bind(slow_path->GetExitLabel());
3691 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003692}
3693
Calin Juravle7c4954d2014-10-28 16:57:40 +00003694void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3695 LocationSummary* locations =
3696 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3697 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003698 case Primitive::kPrimInt:
3699 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003700 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003701 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003702 locations->SetOut(Location::SameAsFirstInput());
3703 // Intel uses edx:eax as the dividend.
3704 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003705 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3706 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3707 // output and request another temp.
3708 if (div->InputAt(1)->IsConstant()) {
3709 locations->AddTemp(Location::RequiresRegister());
3710 }
Calin Juravled0d48522014-11-04 16:40:20 +00003711 break;
3712 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003713
Calin Juravle7c4954d2014-10-28 16:57:40 +00003714 case Primitive::kPrimFloat:
3715 case Primitive::kPrimDouble: {
3716 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003717 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003718 locations->SetOut(Location::SameAsFirstInput());
3719 break;
3720 }
3721
3722 default:
3723 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3724 }
3725}
3726
// Emits code for HDiv. Integral types delegate to GenerateDivRemIntegral;
// float/double use divss/divsd whose second operand may be a register, a
// constant-pool literal, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The locations builder made the output share the first input's register.
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor comes from the RIP-relative literal pool.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisor comes from the RIP-relative literal pool.
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3775
Calin Juravlebacfec32014-11-14 15:54:36 +00003776void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003777 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003778 LocationSummary* locations =
3779 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003780
3781 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003782 case Primitive::kPrimInt:
3783 case Primitive::kPrimLong: {
3784 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003785 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003786 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3787 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003788 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3789 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3790 // output and request another temp.
3791 if (rem->InputAt(1)->IsConstant()) {
3792 locations->AddTemp(Location::RequiresRegister());
3793 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003794 break;
3795 }
3796
3797 case Primitive::kPrimFloat:
3798 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003799 locations->SetInAt(0, Location::Any());
3800 locations->SetInAt(1, Location::Any());
3801 locations->SetOut(Location::RequiresFpuRegister());
3802 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003803 break;
3804 }
3805
3806 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003807 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003808 }
3809}
3810
3811void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3812 Primitive::Type type = rem->GetResultType();
3813 switch (type) {
3814 case Primitive::kPrimInt:
3815 case Primitive::kPrimLong: {
3816 GenerateDivRemIntegral(rem);
3817 break;
3818 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003819 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003820 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003821 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003822 break;
3823 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003824 default:
3825 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3826 }
3827}
3828
// HDivZeroCheck throws on a zero divisor, so it gets throwing-slow-path
// locations; the value to test can live anywhere (register, stack, constant).
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3833
// Emits the divide-by-zero guard: jump to the ArithmeticException slow path
// when the divisor is zero. Handles the divisor in a register, on the stack,
// or as a compile-time constant (constant zero becomes an unconditional jump).
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // Sub-int types are checked with 32-bit comparisons as well.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Constant zero: the exception is unconditional.
          __ jmp(slow_path->GetEntryLabel());
        }
        // Non-zero constant: nothing to check.
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          // Constant zero: the exception is unconditional.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3881
Calin Juravle9aec02f2014-11-18 23:06:35 +00003882void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3883 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3884
3885 LocationSummary* locations =
3886 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3887
3888 switch (op->GetResultType()) {
3889 case Primitive::kPrimInt:
3890 case Primitive::kPrimLong: {
3891 locations->SetInAt(0, Location::RequiresRegister());
3892 // The shift count needs to be in CL.
3893 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3894 locations->SetOut(Location::SameAsFirstInput());
3895 break;
3896 }
3897 default:
3898 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3899 }
3900}
3901
// Emits shl/sar/shr (32- or 64-bit) with the count either in CL or as an
// immediate. Immediate counts are masked to the architectural maximum
// shift distance, matching the hardware's own masking behavior.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // Variable count: the locations builder pinned it to CL.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);  // Arithmetic (sign-propagating) shift.
        } else {
          __ shrl(first_reg, second_reg);  // Logical (zero-filling) shift.
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3959
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003960void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3961 LocationSummary* locations =
3962 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3963
3964 switch (ror->GetResultType()) {
3965 case Primitive::kPrimInt:
3966 case Primitive::kPrimLong: {
3967 locations->SetInAt(0, Location::RequiresRegister());
3968 // The shift count needs to be in CL (unless it is a constant).
3969 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3970 locations->SetOut(Location::SameAsFirstInput());
3971 break;
3972 }
3973 default:
3974 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3975 UNREACHABLE();
3976 }
3977}
3978
// Emits rorl/rorq with the rotation amount either in CL or as an immediate
// masked to the architectural maximum shift distance.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4008
// Shl, Shr and UShr all share their location setup and code generation
// through HandleShift; these visitors are thin dispatch wrappers.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4032
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004033void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004034 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004035 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004036 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004037 if (instruction->IsStringAlloc()) {
4038 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4039 } else {
4040 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004041 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004042 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004043}
4044
// Emits the object-allocation runtime call. Strings are allocated via the
// StringFactory NewEmptyString entry point (invoked through its quick code);
// everything else goes through the instruction's allocation entry point.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes cares
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    // Load the StringFactory method from a thread-local entry point, then
    // tail into its compiled code.
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4061
// Location setup for HNewArray: a runtime call taking (type index, length,
// method) in the runtime calling convention, returning the array in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  // Temp for the type index, loaded by the code generator below.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
4071
// Emits the array-allocation runtime call: materialize the type index in
// the first argument register, then invoke the allocation entry point.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex().index_);
  // Note: if heap poisoning is enabled, the entry point takes cares
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
4083
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004084void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004085 LocationSummary* locations =
4086 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004087 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4088 if (location.IsStackSlot()) {
4089 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4090 } else if (location.IsDoubleStackSlot()) {
4091 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4092 }
4093 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004094}
4095
// No code is emitted: the parameter already lives in the location chosen above.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4100
// HCurrentMethod is pinned to the fixed method register of the calling
// convention (where the ArtMethod* is passed on entry).
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4106
// No code is emitted: the ArtMethod* is already in the method register.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4111
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004112void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4113 LocationSummary* locations =
4114 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4115 locations->SetInAt(0, Location::RequiresRegister());
4116 locations->SetOut(Location::RequiresRegister());
4117}
4118
// Loads an ArtMethod* out of a class's dispatch tables: a single load from
// the embedded vtable, or a double load (IMT pointer, then IMT slot) for
// interface methods.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // First load the IMT pointer from the class, then the method from the IMT.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
            mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4136
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004137void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004138 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004139 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004140 locations->SetInAt(0, Location::RequiresRegister());
4141 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004142}
4143
// Emits bitwise not (notl/notq) in place on the shared input/output register.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  // The locations builder made input 0 and the output share a register.
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  switch (not_->GetResultType()) {
    case Primitive::kPrimInt:
      __ notl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      __ notq(out.AsRegister<CpuRegister>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
4162
David Brazdil66d126e2015-04-03 16:02:44 +01004163void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4164 LocationSummary* locations =
4165 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4166 locations->SetInAt(0, Location::RequiresRegister());
4167 locations->SetOut(Location::SameAsFirstInput());
4168}
4169
// Emits boolean negation as `xor reg, 1`, flipping a 0/1 value in place.
void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations = bool_not->GetLocations();
  // The locations builder made input 0 and the output share a register.
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
}
4177
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004178void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004179 LocationSummary* locations =
4180 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004181 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004182 locations->SetInAt(i, Location::Any());
4183 }
4184 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004185}
4186
// Phis never reach the code generation phase; hitting this visitor is a
// compiler bug, hence the fatal log.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4190
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004191void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004192 /*
4193 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004194 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004195 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4196 */
4197 switch (kind) {
4198 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004199 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004200 break;
4201 }
4202 case MemBarrierKind::kAnyStore:
4203 case MemBarrierKind::kLoadAny:
4204 case MemBarrierKind::kStoreStore: {
4205 // nop
4206 break;
4207 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004208 case MemBarrierKind::kNTStoreStore:
4209 // Non-Temporal Store/Store needs an explicit fence.
4210 MemoryFence(/* non-temporal */ true);
4211 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004212 }
4213}
4214
4215void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4216 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4217
Roland Levillain0d5a2812015-11-13 10:07:31 +00004218 bool object_field_get_with_read_barrier =
4219 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004220 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004221 new (GetGraph()->GetArena()) LocationSummary(instruction,
4222 object_field_get_with_read_barrier ?
4223 LocationSummary::kCallOnSlowPath :
4224 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004225 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004226 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004227 }
Calin Juravle52c48962014-12-16 17:02:57 +00004228 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004229 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4230 locations->SetOut(Location::RequiresFpuRegister());
4231 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004232 // The output overlaps for an object field get when read barriers
4233 // are enabled: we do not want the move to overwrite the object's
4234 // location, as we need it to emit the read barrier.
4235 locations->SetOut(
4236 Location::RequiresRegister(),
4237 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004238 }
Calin Juravle52c48962014-12-16 17:02:57 +00004239}
4240
// Emits the load for an instance or static field get. `base` holds the object
// (or, for a static field, the declaring class); the result is written to
// `out`. Reference fields get read-barrier and implicit-null-check handling
// inside their switch case; all other types are handled after the switch.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    // Sub-word types pick the move that matches Java semantics:
    // zero-extend for boolean/char, sign-extend for byte/short.
    case Primitive::kPrimBoolean: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          // kLoadAny emits no instruction on x86-64; it is a scheduling
          // barrier only (see GenerateMemoryBarrier).
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4339
4340void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4341 const FieldInfo& field_info) {
4342 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4343
4344 LocationSummary* locations =
4345 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004346 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004347 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004348 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004349 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004350
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004351 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004352 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004353 if (is_volatile) {
4354 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4355 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4356 } else {
4357 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4358 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004359 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004360 if (is_volatile) {
4361 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4362 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4363 } else {
4364 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4365 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004366 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004367 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004368 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004369 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004370 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004371 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4372 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004373 locations->AddTemp(Location::RequiresRegister());
4374 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004375}
4376
// Emits the store for an instance or static field set. For volatile fields an
// AnyStore barrier is generated before the store and an AnyAny barrier after
// it; a card-marking write barrier follows reference stores when needed.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // AnyStore emits no instruction on x86-64 (see GenerateMemoryBarrier);
    // it only prevents the compiler from reordering across this point.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper below records the implicit null check itself, so it is
  // not recorded a second time after the switch.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temporary so the value register is
          // left untouched (it is still needed for the write barrier).
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // MoveInt64ToAddress is handed `instruction` and takes care of
        // recording the implicit null check itself.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant (see the
        // kPrimLong case for the null-check bookkeeping).
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object now holding a (possibly) new reference.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // AnyAny does emit a fence on x86-64 (StoreLoad ordering).
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4499
4500void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4501 HandleFieldSet(instruction, instruction->GetFieldInfo());
4502}
4503
4504void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004505 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004506}
4507
// Instance and static field loads share the common location logic.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4511
4512void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004513 HandleFieldGet(instruction, instruction->GetFieldInfo());
4514}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004515
// Static field loads reuse the common field-get location logic.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004519
Calin Juravle52c48962014-12-16 17:02:57 +00004520void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4521 HandleFieldGet(instruction, instruction->GetFieldInfo());
4522}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004523
Calin Juravle52c48962014-12-16 17:02:57 +00004524void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4525 HandleFieldSet(instruction, instruction->GetFieldInfo());
4526}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004527
Calin Juravle52c48962014-12-16 17:02:57 +00004528void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004529 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004530}
4531
Calin Juravlee460d1d2015-09-29 04:52:17 +01004532void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4533 HUnresolvedInstanceFieldGet* instruction) {
4534 FieldAccessCallingConventionX86_64 calling_convention;
4535 codegen_->CreateUnresolvedFieldLocationSummary(
4536 instruction, instruction->GetFieldType(), calling_convention);
4537}
4538
4539void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4540 HUnresolvedInstanceFieldGet* instruction) {
4541 FieldAccessCallingConventionX86_64 calling_convention;
4542 codegen_->GenerateUnresolvedFieldAccess(instruction,
4543 instruction->GetFieldType(),
4544 instruction->GetFieldIndex(),
4545 instruction->GetDexPc(),
4546 calling_convention);
4547}
4548
4549void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4550 HUnresolvedInstanceFieldSet* instruction) {
4551 FieldAccessCallingConventionX86_64 calling_convention;
4552 codegen_->CreateUnresolvedFieldLocationSummary(
4553 instruction, instruction->GetFieldType(), calling_convention);
4554}
4555
4556void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4557 HUnresolvedInstanceFieldSet* instruction) {
4558 FieldAccessCallingConventionX86_64 calling_convention;
4559 codegen_->GenerateUnresolvedFieldAccess(instruction,
4560 instruction->GetFieldType(),
4561 instruction->GetFieldIndex(),
4562 instruction->GetDexPc(),
4563 calling_convention);
4564}
4565
4566void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4567 HUnresolvedStaticFieldGet* instruction) {
4568 FieldAccessCallingConventionX86_64 calling_convention;
4569 codegen_->CreateUnresolvedFieldLocationSummary(
4570 instruction, instruction->GetFieldType(), calling_convention);
4571}
4572
4573void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4574 HUnresolvedStaticFieldGet* instruction) {
4575 FieldAccessCallingConventionX86_64 calling_convention;
4576 codegen_->GenerateUnresolvedFieldAccess(instruction,
4577 instruction->GetFieldType(),
4578 instruction->GetFieldIndex(),
4579 instruction->GetDexPc(),
4580 calling_convention);
4581}
4582
4583void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4584 HUnresolvedStaticFieldSet* instruction) {
4585 FieldAccessCallingConventionX86_64 calling_convention;
4586 codegen_->CreateUnresolvedFieldLocationSummary(
4587 instruction, instruction->GetFieldType(), calling_convention);
4588}
4589
4590void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4591 HUnresolvedStaticFieldSet* instruction) {
4592 FieldAccessCallingConventionX86_64 calling_convention;
4593 codegen_->GenerateUnresolvedFieldAccess(instruction,
4594 instruction->GetFieldType(),
4595 instruction->GetFieldIndex(),
4596 instruction->GetDexPc(),
4597 calling_convention);
4598}
4599
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004600void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004601 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4602 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4603 ? Location::RequiresRegister()
4604 : Location::Any();
4605 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004606}
4607
// Emits a fault-based null check: load from the object so that a null
// reference triggers a hardware fault at a PC the runtime can map back to
// this instruction (via the RecordPcInfo call below).
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a subsequent instruction dereferences this object anyway, that
  // instruction can serve as the null check and nothing is emitted here.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // `test reg, [obj]` reads the first word of the object and faults when
  // `obj` is null; RAX is just an arbitrary register, its value is unused.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  RecordPcInfo(instruction, instruction->GetDexPc());
}
4618
// Emits an explicit compare-and-branch null check that jumps to a slow path
// when the object is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF iff the reference is null.
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    // Compare the spilled reference against null directly in memory.
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // The only constant a null check can see is the null constant (checked
    // below), so the check always fails: jump straight to the slow path.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
4638
// Delegates to the code generator, which selects the implicit (fault-based)
// or explicit (compare-and-branch) strategy; see the locations builder, which
// keys the input constraint off GetImplicitNullChecks().
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
4642
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004643void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004644 bool object_array_get_with_read_barrier =
4645 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004646 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004647 new (GetGraph()->GetArena()) LocationSummary(instruction,
4648 object_array_get_with_read_barrier ?
4649 LocationSummary::kCallOnSlowPath :
4650 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004651 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004652 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004653 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004654 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004655 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004656 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4657 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4658 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004659 // The output overlaps for an object array get when read barriers
4660 // are enabled: we do not want the move to overwrite the array's
4661 // location, as we need it to emit the read barrier.
4662 locations->SetOut(
4663 Location::RequiresRegister(),
4664 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004665 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004666}
4667
// Emits the load of an array element (or String character): address the
// element via base register + scaled index + data offset, using the move that
// matches the element type. Reference elements additionally get read-barrier
// and implicit-null-check handling inside their switch case.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    // Sub-word types pick the move that matches Java semantics:
    // zero-extend for boolean/char, sign-extend for byte/short.
    case Primitive::kPrimBoolean: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimByte: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimShort: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case Primitive::kPrimChar: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Branch cases into compressed and uncompressed for each index's type.
        // The low bit of the string's count field distinguishes the two
        // layouts: compressed strings store 8-bit chars, uncompressed 16-bit.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        // This load of the count field also serves as the implicit null
        // check, recorded just below.
        __ testl(Address(obj, count_offset), Immediate(1));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          // A constant index folds into a plain offset from the array base.
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
4784
4785void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004786 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004787
4788 bool needs_write_barrier =
4789 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004790 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004791
Nicolas Geoffray39468442014-09-02 15:17:15 +01004792 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004793 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004794 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004795 LocationSummary::kCallOnSlowPath :
4796 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004797
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004798 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004799 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4800 if (Primitive::IsFloatingPointType(value_type)) {
4801 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004802 } else {
4803 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4804 }
4805
4806 if (needs_write_barrier) {
4807 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004808 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004809 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004810 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004811}
4812
// Emits the store `array[index] = value` for every component type.
// Reference stores additionally perform an (optional) runtime type check
// and mark the GC card; all other component types are plain stores.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Field offsets used by the reference-store type check below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    // 8-bit stores.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      // The store above may fault on a null array; record it as the
      // implicit null check.
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    // 16-bit stores.
    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    // Reference stores: optional type check, poisoning, write barrier.
    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // Storing null never needs a write barrier nor a type check.
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // A null value always passes the type check; store it directly.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // The actual reference store, poisoned if required.
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      // If a type check was emitted, the class load above already served
      // as the implicit null check.
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may not fit in a single store; let the code
        // generator split it across the low and high halves if needed.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the raw bit pattern of the float constant.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the raw bit pattern of the double constant, possibly as
        // two 32-bit halves.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5015
5016void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005017 LocationSummary* locations =
5018 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005019 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005020 if (!instruction->IsEmittedAtUseSite()) {
5021 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5022 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005023}
5024
5025void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005026 if (instruction->IsEmittedAtUseSite()) {
5027 return;
5028 }
5029
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005030 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005031 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005032 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5033 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005034 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005035 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005036 // Mask out most significant bit in case the array is String's array of char.
5037 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005038 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005039 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005040}
5041
5042void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005043 RegisterSet caller_saves = RegisterSet::Empty();
5044 InvokeRuntimeCallingConvention calling_convention;
5045 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5046 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5047 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005048 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005049 HInstruction* length = instruction->InputAt(1);
5050 if (!length->IsEmittedAtUseSite()) {
5051 locations->SetInAt(1, Location::RegisterOrConstant(length));
5052 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005053}
5054
// Compares the index (input 0) against the array length (input 1) and
// branches to a throwing slow path when the index is out of range.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // Both operands constant: the check is fully decidable here.
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Always out of bounds: jump unconditionally to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned compare: a negative index also takes the slow path.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Strip the compression bit before comparing against the index.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        // The compare against in-memory length doubles as the null check.
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Operands are reversed here (length vs. index), hence kBelowEqual.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5115
5116void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5117 CpuRegister card,
5118 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005119 CpuRegister value,
5120 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005121 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005122 if (value_can_be_null) {
5123 __ testl(value, value);
5124 __ j(kEqual, &is_null);
5125 }
Andreas Gampe542451c2016-07-26 09:02:02 -07005126 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005127 /* no_rip */ true));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005128 __ movq(temp, object);
5129 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillain4d027112015-07-01 15:41:14 +01005130 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005131 if (value_can_be_null) {
5132 __ Bind(&is_null);
5133 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005134}
5135
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves have no location summary of their own; they are
  // emitted directly by the move resolver (see
  // InstructionCodeGeneratorX86_64::VisitParallelMove), so reaching this
  // visitor indicates a bug.
  LOG(FATAL) << "Unimplemented";
}
5139
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel move resolver, which emits the native code
  // for the whole set of moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5143
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005144void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005145 LocationSummary* locations =
5146 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005147 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005148}
5149
5150void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005151 HBasicBlock* block = instruction->GetBlock();
5152 if (block->GetLoopInformation() != nullptr) {
5153 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5154 // The back edge will generate the suspend check.
5155 return;
5156 }
5157 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5158 // The goto will generate the suspend check.
5159 return;
5160 }
5161 GenerateSuspendCheck(instruction, nullptr);
5162}
5163
// Emits a test of the current thread's flags and transfers control to a
// (lazily created and cached) suspend-check slow path when any flag is
// set.  `successor` is the block to continue with when no suspension is
// requested, or null to fall through after the check.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First visit: create the slow path and cache it on the instruction.
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the thread flags, stored at a fixed offset off the GS segment.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Fall-through form: enter the slow path only when a flag is set,
    // and resume right here afterwards.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Branch form: jump to the successor when no flag is set, otherwise
    // fall into the slow path.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5191
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  // The resolver emits code through the code generator's assembler.
  return codegen_->GetAssembler();
}
5195
// Emits the native code for move number `index` of the parallel move
// being resolved.  Dispatches on the source kind (register, stack slot,
// double stack slot, constant, FPU register) and then on the
// destination kind; memory-to-memory moves go through the reserved TMP
// register.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      // 32-bit spill.
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      // 64-bit spill.
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // Stack-to-stack: bounce through TMP.
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // 64-bit stack-to-stack: bounce through TMP.
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // Shorter encoding (and no immediate) for zeroing a register.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    }
  }
}
5293
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005294void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005295 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005296 __ movl(Address(CpuRegister(RSP), mem), reg);
5297 __ movl(reg, CpuRegister(TMP));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005298}
5299
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005300void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005301 ScratchRegisterScope ensure_scratch(
5302 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5303
5304 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5305 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5306 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5307 Address(CpuRegister(RSP), mem2 + stack_offset));
5308 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5309 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5310 CpuRegister(ensure_scratch.GetRegister()));
5311}
5312
Mark Mendell8a1c7282015-06-29 15:41:28 -04005313void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
5314 __ movq(CpuRegister(TMP), reg1);
5315 __ movq(reg1, reg2);
5316 __ movq(reg2, CpuRegister(TMP));
5317}
5318
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005319void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
5320 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5321 __ movq(Address(CpuRegister(RSP), mem), reg);
5322 __ movq(reg, CpuRegister(TMP));
5323}
5324
5325void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
5326 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005327 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005328
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005329 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5330 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5331 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5332 Address(CpuRegister(RSP), mem2 + stack_offset));
5333 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5334 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5335 CpuRegister(ensure_scratch.GetRegister()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005336}
5337
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005338void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
5339 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5340 __ movss(Address(CpuRegister(RSP), mem), reg);
5341 __ movd(reg, CpuRegister(TMP));
5342}
5343
5344void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
5345 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
5346 __ movsd(Address(CpuRegister(RSP), mem), reg);
5347 __ movd(reg, CpuRegister(TMP));
5348}
5349
// Emits code that swaps the source and destination values of the parallel
// move at `index`. Dispatches on the (source kind, destination kind) pair to
// the matching Exchange32/Exchange64 helper; every register<->memory and
// memory<->memory case goes through the reserved TMP register (plus an extra
// scratch register for memory<->memory), so no allocated register is
// clobbered.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM: bounce one value through the TMP core register.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5385
5386
5387void ParallelMoveResolverX86_64::SpillScratch(int reg) {
5388 __ pushq(CpuRegister(reg));
5389}
5390
5391
5392void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
5393 __ popq(CpuRegister(reg));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005394}
5395
// Emits the fast-path class-initialization check: compares the class status
// field in `class_reg` against kStatusInitialized and jumps to `slow_path`
// (which performs/waits for initialization) when the status is lower.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5404
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005405HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5406 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005407 switch (desired_class_load_kind) {
5408 case HLoadClass::LoadKind::kReferrersClass:
5409 break;
5410 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5411 DCHECK(!GetCompilerOptions().GetCompilePic());
5412 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5413 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5414 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5415 DCHECK(GetCompilerOptions().GetCompilePic());
5416 break;
5417 case HLoadClass::LoadKind::kBootImageAddress:
5418 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005419 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005420 break;
5421 case HLoadClass::LoadKind::kDexCachePcRelative:
5422 DCHECK(!Runtime::Current()->UseJitCompilation());
5423 break;
5424 case HLoadClass::LoadKind::kDexCacheViaMethod:
5425 break;
5426 }
5427 return desired_class_load_kind;
5428}
5429
// Sets up register constraints for HLoadClass. When an access check is
// required, the load becomes a runtime call following the runtime calling
// convention; otherwise the constraints depend on the load kind and on
// whether a read-barrier slow path may be needed.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Location::RegisterLocation(RAX),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // Boot-image classes are never read-barriered (see the IsInBootImage()
  // checks in the code generation below).
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  // Only these two load kinds read the current method, passed as input 0.
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}
5457
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005458Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
5459 dex::TypeIndex dex_index,
5460 uint64_t address) {
5461 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index), address);
5462 // Add a patch entry and return the label.
5463 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
5464 PatchInfo<Label>* info = &jit_class_patches_.back();
5465 return &info->label;
5466}
5467
// Generates the code for HLoadClass. Access-checked loads go straight to the
// runtime. Otherwise the code emitted depends on the load kind; some kinds
// produce a value that may still be null (class not yet resolved) and/or
// require a clinit check, both of which share one LoadClassSlowPathX86_64.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes never move, so they never need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The dummy offset is replaced at link time with the real displacement.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // Load from the JIT root table; the address is patched when the table
      // is emitted (see NewJitRootClassPatch).
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetAddress());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // The dex cache entry may still be null if the class is unresolved.
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
          /* fixup_label */ nullptr,
          read_barrier_option);
      // The dex cache entry may still be null if the class is unresolved.
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  // A single slow path handles both the null (unresolved) case and the
  // class-initialization check.
  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5568
5569void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5570 LocationSummary* locations =
5571 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5572 locations->SetInAt(0, Location::RequiresRegister());
5573 if (check->HasUses()) {
5574 locations->SetOut(Location::SameAsFirstInput());
5575 }
5576}
5577
5578void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005579 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005580 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005581 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005582 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005583 GenerateClassInitializationCheck(slow_path,
5584 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005585}
5586
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005587HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5588 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005589 switch (desired_string_load_kind) {
5590 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5591 DCHECK(!GetCompilerOptions().GetCompilePic());
5592 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5593 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5594 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5595 DCHECK(GetCompilerOptions().GetCompilePic());
5596 break;
5597 case HLoadString::LoadKind::kBootImageAddress:
5598 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005599 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005600 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005601 break;
5602 case HLoadString::LoadKind::kDexCacheViaMethod:
5603 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005604 case HLoadString::LoadKind::kJitTableAddress:
5605 DCHECK(Runtime::Current()->UseJitCompilation());
5606 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005607 }
5608 return desired_string_load_kind;
5609}
5610
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005611void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005612 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005613 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005614 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005615 locations->SetOut(Location::RegisterLocation(RAX));
5616 } else {
5617 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005618 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5619 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5620 // Rely on the pResolveString and/or marking to save everything.
5621 // Custom calling convention: RAX serves as both input and output.
5622 RegisterSet caller_saves = RegisterSet::Empty();
5623 caller_saves.Add(Location::RegisterLocation(RAX));
5624 locations->SetCustomSlowPathCallerSaves(caller_saves);
5625 } else {
5626 // For non-Baker read barrier we have a temp-clobbering call.
5627 }
5628 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005629 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005630}
5631
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005632Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005633 dex::StringIndex dex_index,
5634 Handle<mirror::String> handle) {
5635 jit_string_roots_.Overwrite(
5636 StringReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005637 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005638 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005639 PatchInfo<Label>* info = &jit_string_patches_.back();
5640 return &info->label;
5641}
5642
// Generates the code for HLoadString, dispatching on the load kind. Kinds
// not handled in the switch fall through to a runtime call to
// pResolveString.
//
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // The dummy offset is replaced at link time with the real displacement.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // The BSS entry may still be null if the string is unresolved.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // Load from the JIT root table; the address is patched when the table
      // is emitted (see NewJitRootStringPatch).
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5698
David Brazdilcb1c0552015-08-04 16:22:25 +01005699static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005700 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005701 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005702}
5703
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005704void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5705 LocationSummary* locations =
5706 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5707 locations->SetOut(Location::RequiresRegister());
5708}
5709
5710void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005711 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5712}
5713
// Clearing the exception needs no operands and produces no value; just
// register an empty location summary.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5717
5718void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5719 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005720}
5721
5722void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5723 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005724 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005725 InvokeRuntimeCallingConvention calling_convention;
5726 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5727}
5728
// Delegates to the runtime, which delivers the exception object passed in
// the first runtime-call argument register.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5733
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005734static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5735 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005736 // We need a temporary for holding the iftable length.
5737 return true;
5738 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005739 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005740 !kUseBakerReadBarrier &&
5741 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005742 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5743 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5744}
5745
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005746static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5747 return kEmitCompilerReadBarrier &&
5748 !kUseBakerReadBarrier &&
5749 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5750 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5751 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5752}
5753
// Sets up register constraints for HInstanceOf. Whether a slow path (and a
// temporary) is needed depends on the type-check kind and on the read
// barrier configuration.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      // These kinds only need a slow path when read barriers are in use.
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These kinds always need a slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
5788
// Emits code for an `instanceof` test. The strategy depends on the statically
// known TypeCheckKind: the simple kinds (exact / abstract / hierarchy / array
// object) are resolved inline, loading class metadata through
// GenerateReferenceLoad{One,Two}Registers with the compiler's read barrier
// option; kArrayCheck falls back to a non-fatal slow path on mismatch; the
// unresolved and interface kinds always jump to TypeCheckSlowPathX86_64.
// The boolean result (0 or 1) is materialized in the `out` register.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  // A temp is only allocated for the kinds that need one (see the locations
  // builder and InstanceOfTypeCheckNeedsATemporary); otherwise no location.
  Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  // Field offsets in mirror::Object / mirror::Class used by the inline checks.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  // `zero` materializes a false result; `done` is the common exit. Both are
  // bound at the bottom only if some branch actually linked them.
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        // No null check was emitted above, so the result can be produced
        // branchlessly from the comparison flags.
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }
  }

  // Materialize the false result only if some branch targeted `zero`.
  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6015
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006016static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006017 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006018 case TypeCheckKind::kExactCheck:
6019 case TypeCheckKind::kAbstractClassCheck:
6020 case TypeCheckKind::kClassHierarchyCheck:
6021 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006022 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006023 case TypeCheckKind::kInterfaceCheck:
6024 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006025 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006026 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006027 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006028 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006029 LOG(FATAL) << "Unreachable";
6030 UNREACHABLE();
6031}
6032
6033void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6034 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6035 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6036 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6037 LocationSummary::CallKind call_kind = is_fatal_slow_path
6038 ? LocationSummary::kNoCall
6039 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006040 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6041 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006042 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6043 // Require a register for the interface check since there is a loop that compares the class to
6044 // a memory address.
6045 locations->SetInAt(1, Location::RequiresRegister());
6046 } else {
6047 locations->SetInAt(1, Location::Any());
6048 }
6049
Roland Levillain0d5a2812015-11-13 10:07:31 +00006050 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6051 locations->AddTemp(Location::RequiresRegister());
6052 // When read barriers are enabled, we need an additional temporary
6053 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006054 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006055 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006056 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006057}
6058
// Emits code for a checkcast. On failure every path ends in
// TypeCheckSlowPathX86_64 (which throws or, for non-fatal paths, retries at
// the runtime); on success execution falls through to `done`. The inline
// checks deliberately skip read barriers (kWithoutReadBarrier) — a stale
// reference can only cause a false negative, which the slow path resolves.
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // Second temp only exists for the kinds that requested one in the builder.
  Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  // Field offsets in mirror::Object / mirror::Class / mirror::Array used below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal =
      IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);


  NearLabel done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      // Otherwise, jump to the slow path to throw the exception.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck:
      // Fast path for the interface check. We always go slow path for heap poisoning since
      // unpoisoning cls would require an extra temp.
      if (!kPoisonHeapReferences) {
        // Try to avoid read barriers to improve the fast path. We can not get false positives by
        // doing this.
        // /* HeapReference<Class> */ temp = obj->klass_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          obj_loc,
                                          class_offset,
                                          kWithoutReadBarrier);

        // /* HeapReference<Class> */ temp = temp->iftable_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          temp_loc,
                                          iftable_offset,
                                          kWithoutReadBarrier);
        // Iftable is never null.
        __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
        // Loop through the iftable and check if any class matches.
        NearLabel start_loop;
        __ Bind(&start_loop);
        // Need to subtract first to handle the empty array case.
        __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
        __ j(kNegative, type_check_slow_path->GetEntryLabel());
        // Go to next interface if the classes do not match.
        __ cmpl(cls.AsRegister<CpuRegister>(),
                CodeGeneratorX86_64::ArrayAddress(temp,
                                                  maybe_temp2_loc,
                                                  TIMES_4,
                                                  object_array_data_offset));
        __ j(kNotEqual, &start_loop);  // Return if same class.
      } else {
        __ jmp(type_check_slow_path->GetEntryLabel());
      }
      break;
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
6282
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006283void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6284 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006285 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006286 InvokeRuntimeCallingConvention calling_convention;
6287 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6288}
6289
6290void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006291 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006292 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006293 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006294 if (instruction->IsEnter()) {
6295 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6296 } else {
6297 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6298 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006299}
6300
// And/Or/Xor share identical location constraints; all three delegate to the
// common HandleBitwiseOperation builder below.
void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6304
6305void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6306 LocationSummary* locations =
6307 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6308 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6309 || instruction->GetResultType() == Primitive::kPrimLong);
6310 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006311 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006312 locations->SetOut(Location::SameAsFirstInput());
6313}
6314
// Code generation for And/Or/Xor is shared; each visitor forwards to the
// common HandleBitwiseOperation emitter.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6326
// Emits code for a bitwise And/Or/Xor on int or long operands.
// x86-64 uses two-address instructions, so the first input location
// doubles as the output (checked below); the register allocator has
// already arranged the second operand as a register, a constant or a
// stack slot, and the matching instruction form is selected here.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-address form: destination and first source must coincide.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    if (second.IsRegister()) {
      // Register-register form.
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      // A 32-bit constant always fits in an immediate operand.
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Second operand lives on the stack; operate on memory directly.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // 64-bit instructions can only encode sign-extended 32-bit immediates;
    // wider constants are loaded RIP-relative from the constant area.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6415
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006416void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
6417 HInstruction* instruction,
6418 Location out,
6419 uint32_t offset,
6420 Location maybe_temp,
6421 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006422 CpuRegister out_reg = out.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006423 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006424 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006425 if (kUseBakerReadBarrier) {
6426 // Load with fast path based Baker's read barrier.
6427 // /* HeapReference<Object> */ out = *(out + offset)
6428 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006429 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006430 } else {
6431 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006432 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006433 // in the following move operation, as we will need it for the
6434 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00006435 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006436 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006437 // /* HeapReference<Object> */ out = *(out + offset)
6438 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006439 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006440 }
6441 } else {
6442 // Plain load with no read barrier.
6443 // /* HeapReference<Object> */ out = *(out + offset)
6444 __ movl(out_reg, Address(out_reg, offset));
6445 __ MaybeUnpoisonHeapReference(out_reg);
6446 }
6447}
6448
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006449void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
6450 HInstruction* instruction,
6451 Location out,
6452 Location obj,
6453 uint32_t offset,
6454 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006455 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6456 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006457 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006458 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006459 if (kUseBakerReadBarrier) {
6460 // Load with fast path based Baker's read barrier.
6461 // /* HeapReference<Object> */ out = *(obj + offset)
6462 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006463 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006464 } else {
6465 // Load with slow path based read barrier.
6466 // /* HeapReference<Object> */ out = *(obj + offset)
6467 __ movl(out_reg, Address(obj_reg, offset));
6468 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6469 }
6470 } else {
6471 // Plain load with no read barrier.
6472 // /* HeapReference<Object> */ out = *(obj + offset)
6473 __ movl(out_reg, Address(obj_reg, offset));
6474 __ MaybeUnpoisonHeapReference(out_reg);
6475 }
6476}
6477
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006478void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
6479 HInstruction* instruction,
6480 Location root,
6481 const Address& address,
6482 Label* fixup_label,
6483 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006484 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006485 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006486 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006487 if (kUseBakerReadBarrier) {
6488 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6489 // Baker's read barrier are used:
6490 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006491 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006492 // if (Thread::Current()->GetIsGcMarking()) {
6493 // root = ReadBarrier::Mark(root)
6494 // }
6495
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006496 // /* GcRoot<mirror::Object> */ root = *address
6497 __ movl(root_reg, address);
6498 if (fixup_label != nullptr) {
6499 __ Bind(fixup_label);
6500 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006501 static_assert(
6502 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6503 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6504 "have different sizes.");
6505 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6506 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6507 "have different sizes.");
6508
Vladimir Marko953437b2016-08-24 08:30:46 +00006509 // Slow path marking the GC root `root`.
6510 SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006511 instruction, root, /* unpoison_ref_before_marking */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006512 codegen_->AddSlowPath(slow_path);
6513
Andreas Gampe542451c2016-07-26 09:02:02 -07006514 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006515 /* no_rip */ true),
6516 Immediate(0));
6517 __ j(kNotEqual, slow_path->GetEntryLabel());
6518 __ Bind(slow_path->GetExitLabel());
6519 } else {
6520 // GC root loaded through a slow path for read barriers other
6521 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006522 // /* GcRoot<mirror::Object>* */ root = address
6523 __ leaq(root_reg, address);
6524 if (fixup_label != nullptr) {
6525 __ Bind(fixup_label);
6526 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006527 // /* mirror::Object* */ root = root->Read()
6528 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6529 }
6530 } else {
6531 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006532 // /* GcRoot<mirror::Object> */ root = *address
6533 __ movl(root_reg, address);
6534 if (fixup_label != nullptr) {
6535 __ Bind(fixup_label);
6536 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006537 // Note that GC roots are not affected by heap poisoning, thus we
6538 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006539 }
6540}
6541
6542void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6543 Location ref,
6544 CpuRegister obj,
6545 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006546 bool needs_null_check) {
6547 DCHECK(kEmitCompilerReadBarrier);
6548 DCHECK(kUseBakerReadBarrier);
6549
6550 // /* HeapReference<Object> */ ref = *(obj + offset)
6551 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006552 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006553}
6554
6555void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6556 Location ref,
6557 CpuRegister obj,
6558 uint32_t data_offset,
6559 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006560 bool needs_null_check) {
6561 DCHECK(kEmitCompilerReadBarrier);
6562 DCHECK(kUseBakerReadBarrier);
6563
Roland Levillain3d312422016-06-23 13:53:42 +01006564 static_assert(
6565 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6566 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006567 // /* HeapReference<Object> */ ref =
6568 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006569 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006570 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006571}
6572
// Core of the Baker read barrier fast path: loads the reference at
// `src` into `ref`, and calls a mark slow path iff the object's lock
// word says it is gray. When `always_update_field` is true, the slow
// path additionally writes the possibly-moved reference back to the
// field (used e.g. for intrinsics that update fields in place); in
// that mode `temp1`/`temp2` must be non-null scratch registers.
// NOTE(review): the emission order below is flag-sensitive — the testb
// sets the flags, and nothing between it and the final j() may clobber
// them; keep that invariant when touching this code.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to the object; record it as
    // the implicit null check if one was requested.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6656
6657void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6658 Location out,
6659 Location ref,
6660 Location obj,
6661 uint32_t offset,
6662 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006663 DCHECK(kEmitCompilerReadBarrier);
6664
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006665 // Insert a slow path based read barrier *after* the reference load.
6666 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006667 // If heap poisoning is enabled, the unpoisoning of the loaded
6668 // reference will be carried out by the runtime within the slow
6669 // path.
6670 //
6671 // Note that `ref` currently does not get unpoisoned (when heap
6672 // poisoning is enabled), which is alright as the `ref` argument is
6673 // not used by the artReadBarrierSlow entry point.
6674 //
6675 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6676 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6677 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6678 AddSlowPath(slow_path);
6679
Roland Levillain0d5a2812015-11-13 10:07:31 +00006680 __ jmp(slow_path->GetEntryLabel());
6681 __ Bind(slow_path->GetExitLabel());
6682}
6683
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006684void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6685 Location out,
6686 Location ref,
6687 Location obj,
6688 uint32_t offset,
6689 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006690 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006691 // Baker's read barriers shall be handled by the fast path
6692 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6693 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006694 // If heap poisoning is enabled, unpoisoning will be taken care of
6695 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006696 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006697 } else if (kPoisonHeapReferences) {
6698 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6699 }
6700}
6701
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006702void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6703 Location out,
6704 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006705 DCHECK(kEmitCompilerReadBarrier);
6706
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006707 // Insert a slow path based read barrier *after* the GC root load.
6708 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006709 // Note that GC roots are not affected by heap poisoning, so we do
6710 // not need to do anything special for this here.
6711 SlowPathCode* slow_path =
6712 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6713 AddSlowPath(slow_path);
6714
Roland Levillain0d5a2812015-11-13 10:07:31 +00006715 __ jmp(slow_path->GetEntryLabel());
6716 __ Bind(slow_path->GetExitLabel());
6717}
6718
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006719void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006720 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006721 LOG(FATAL) << "Unreachable";
6722}
6723
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006724void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006725 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006726 LOG(FATAL) << "Unreachable";
6727}
6728
Mark Mendellfe57faa2015-09-18 09:26:15 -04006729// Simple implementation of packed switch - generate cascaded compare/jumps.
6730void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6731 LocationSummary* locations =
6732 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6733 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006734 locations->AddTemp(Location::RequiresRegister());
6735 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006736}
6737
// Emits code for a packed switch. Small switches (up to
// kPackedSwitchJumpTableThreshold cases) become a cascade of
// compare/jump pairs; larger ones use a jump table of 32-bit offsets
// stored in the method's constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero bias: values below `lower_bound` go to the default
      // block, a value equal to it goes to the first successor.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero bias, an
      // unsigned "below" comparison covers the out-of-range check.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps: one cmpl serves two
    // consecutive cases (strictly-less and equal).
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table lowering from here on.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? (unsigned compare also catches negatives)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6818
Aart Bikc5d47542016-01-27 17:00:35 -08006819void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6820 if (value == 0) {
6821 __ xorl(dest, dest);
6822 } else {
6823 __ movl(dest, Immediate(value));
6824 }
6825}
6826
Mark Mendell92e83bf2015-05-07 11:25:03 -04006827void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6828 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006829 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006830 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006831 } else if (IsUint<32>(value)) {
6832 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006833 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6834 } else {
6835 __ movq(dest, Immediate(value));
6836 }
6837}
6838
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006839void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6840 if (value == 0) {
6841 __ xorps(dest, dest);
6842 } else {
6843 __ movss(dest, LiteralInt32Address(value));
6844 }
6845}
6846
6847void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6848 if (value == 0) {
6849 __ xorpd(dest, dest);
6850 } else {
6851 __ movsd(dest, LiteralInt64Address(value));
6852 }
6853}
6854
6855void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6856 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6857}
6858
6859void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6860 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6861}
6862
Aart Bika19616e2016-02-01 18:57:58 -08006863void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6864 if (value == 0) {
6865 __ testl(dest, dest);
6866 } else {
6867 __ cmpl(dest, Immediate(value));
6868 }
6869}
6870
6871void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6872 if (IsInt<32>(value)) {
6873 if (value == 0) {
6874 __ testq(dest, dest);
6875 } else {
6876 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6877 }
6878 } else {
6879 // Value won't fit in an int.
6880 __ cmpq(dest, LiteralInt64Address(value));
6881 }
6882}
6883
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006884void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6885 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006886 GenerateIntCompare(lhs_reg, rhs);
6887}
6888
6889void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006890 if (rhs.IsConstant()) {
6891 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006892 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006893 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006894 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006895 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006896 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006897 }
6898}
6899
6900void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6901 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6902 if (rhs.IsConstant()) {
6903 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6904 Compare64BitValue(lhs_reg, value);
6905 } else if (rhs.IsDoubleStackSlot()) {
6906 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6907 } else {
6908 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6909 }
6910}
6911
6912Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6913 Location index,
6914 ScaleFactor scale,
6915 uint32_t data_offset) {
6916 return index.IsConstant() ?
6917 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6918 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6919}
6920
Mark Mendellcfa410b2015-05-25 16:02:44 -04006921void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6922 DCHECK(dest.IsDoubleStackSlot());
6923 if (IsInt<32>(value)) {
6924 // Can move directly as an int32 constant.
6925 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6926 Immediate(static_cast<int32_t>(value)));
6927 } else {
6928 Load64BitValue(CpuRegister(TMP), value);
6929 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6930 }
6931}
6932
Mark Mendell9c86b482015-09-18 13:36:07 -04006933/**
6934 * Class to handle late fixup of offsets into constant area.
6935 */
6936class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6937 public:
6938 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6939 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6940
6941 protected:
6942 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6943
6944 CodeGeneratorX86_64* codegen_;
6945
6946 private:
6947 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6948 // Patch the correct offset for the instruction. We use the address of the
6949 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6950 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6951 int32_t relative_position = constant_offset - pos;
6952
6953 // Patch in the right value.
6954 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6955 }
6956
6957 // Location in constant area that the fixup refers to.
6958 size_t offset_into_constant_area_;
6959};
6960
6961/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
6963 * constant area.
6964 */
6965class JumpTableRIPFixup : public RIPFixup {
6966 public:
6967 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6968 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6969
6970 void CreateJumpTable() {
6971 X86_64Assembler* assembler = codegen_->GetAssembler();
6972
6973 // Ensure that the reference to the jump table has the correct offset.
6974 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6975 SetOffset(offset_in_constant_table);
6976
6977 // Compute the offset from the start of the function to this jump table.
6978 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6979
6980 // Populate the jump table with the correct values for the jump table.
6981 int32_t num_entries = switch_instr_->GetNumEntries();
6982 HBasicBlock* block = switch_instr_->GetBlock();
6983 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6984 // The value that we want is the target offset - the position of the table.
6985 for (int32_t i = 0; i < num_entries; i++) {
6986 HBasicBlock* b = successors[i];
6987 Label* l = codegen_->GetLabelOf(b);
6988 DCHECK(l->IsBound());
6989 int32_t offset_to_block = l->Position() - current_table_offset;
6990 assembler->AppendInt32(offset_to_block);
6991 }
6992 }
6993
6994 private:
6995 const HPackedSwitch* switch_instr_;
6996};
6997
Mark Mendellf55c3e02015-03-26 21:07:46 -04006998void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6999 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007000 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007001 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7002 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007003 assembler->Align(4, 0);
7004 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007005
7006 // Populate any jump tables.
7007 for (auto jump_table : fixups_to_jump_tables_) {
7008 jump_table->CreateJumpTable();
7009 }
7010
7011 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007012 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007013 }
7014
7015 // And finish up.
7016 CodeGenerator::Finalize(allocator);
7017}
7018
Mark Mendellf55c3e02015-03-26 21:07:46 -04007019Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7020 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7021 return Address::RIP(fixup);
7022}
7023
7024Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7025 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7026 return Address::RIP(fixup);
7027}
7028
7029Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7030 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7031 return Address::RIP(fixup);
7032}
7033
7034Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7035 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7036 return Address::RIP(fixup);
7037}
7038
Andreas Gampe85b62f22015-09-09 13:15:38 -07007039// TODO: trg as memory.
7040void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7041 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007042 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007043 return;
7044 }
7045
7046 DCHECK_NE(type, Primitive::kPrimVoid);
7047
7048 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7049 if (trg.Equals(return_loc)) {
7050 return;
7051 }
7052
7053 // Let the parallel move resolver take care of all of this.
7054 HParallelMove parallel_move(GetGraph()->GetArena());
7055 parallel_move.AddMove(return_loc, trg, type, nullptr);
7056 GetMoveResolver()->EmitNativeCode(&parallel_move);
7057}
7058
Mark Mendell9c86b482015-09-18 13:36:07 -04007059Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7060 // Create a fixup to be used to create and address the jump table.
7061 JumpTableRIPFixup* table_fixup =
7062 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7063
7064 // We have to populate the jump tables.
7065 fixups_to_jump_tables_.push_back(table_fixup);
7066 return Address::RIP(table_fixup);
7067}
7068
// Stores the 64-bit constant `v` to memory, `addr_low`/`addr_high` addressing the
// low and high 32-bit halves of the destination. When `instruction` can rely on an
// implicit null check, it is recorded right after the first store only, since only
// the first access to the object can fault.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    // `v` fits in a sign-extended 32-bit immediate: a single movq suffices.
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Too wide for a sign-extended 32-bit immediate (x86-64 has no 64-bit
    // store-immediate). Store the two 32-bit halves separately.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7086
// Patches one root use in JIT-compiled code: writes the 32-bit address of the
// root's entry in the root table (`roots_data` + index) over the literal in the
// code stream that `info.label` identifies.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // The label's bound position is adjusted back to the literal's offset in the code.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of entry `index_in_table` in the root table starting at `roots_data`.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The literal in the code stream need not be 4-byte aligned; use an
  // alignment-1 type so the store is well-defined regardless.
  typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
  // The cast DCHECKs that the table address fits in 32 bits.
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7098
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007099void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7100 for (const PatchInfo<Label>& info : jit_string_patches_) {
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007101 const auto& it = jit_string_roots_.find(
7102 StringReference(&info.dex_file, dex::StringIndex(info.index)));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007103 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007104 PatchJitRootUse(code, roots_data, info, it->second);
7105 }
7106
7107 for (const PatchInfo<Label>& info : jit_class_patches_) {
7108 const auto& it = jit_class_roots_.find(
7109 TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
7110 DCHECK(it != jit_class_roots_.end());
7111 PatchJitRootUse(code, roots_data, info, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007112 }
7113}
7114
Roland Levillain4d027112015-07-01 15:41:14 +01007115#undef __
7116
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007117} // namespace x86_64
7118} // namespace art