blob: 89f4ae04d70847a72363533154561e9e9c8ba342 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010042static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010043static constexpr Register kMethodRegisterArgument = RDI;
Vladimir Markof3e0ee22015-12-17 15:23:13 +000044// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
45// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
46// generates less code/data with a small num_entries.
47static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010048
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +000049static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000050static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010051
Mark Mendell24f2dfa2015-01-14 19:51:45 -050052static constexpr int kC2ConditionMask = 0x400;
53
Roland Levillain7cbd27f2016-08-11 23:53:33 +010054// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
55#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070056#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
Andreas Gampe85b62f22015-09-09 13:15:38 -0700103class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000104 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000105 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
106 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000107
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000108 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Calin Juravled0d48522014-11-04 16:40:20 +0000109 __ Bind(GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000110 if (type_ == Primitive::kPrimInt) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000111 if (is_div_) {
112 __ negl(cpu_reg_);
113 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400114 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000115 }
116
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000117 } else {
118 DCHECK_EQ(Primitive::kPrimLong, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000119 if (is_div_) {
120 __ negq(cpu_reg_);
121 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400122 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000123 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000124 }
Calin Juravled0d48522014-11-04 16:40:20 +0000125 __ jmp(GetExitLabel());
126 }
127
Alexandre Rames9931f312015-06-19 14:47:01 +0100128 const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }
129
Calin Juravled0d48522014-11-04 16:40:20 +0000130 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000131 const CpuRegister cpu_reg_;
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000132 const Primitive::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 const bool is_div_;
134 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000135};
136
Andreas Gampe85b62f22015-09-09 13:15:38 -0700137class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000138 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100139 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000140 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000141
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000143 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000144 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100145 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000146 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100147 if (successor_ == nullptr) {
148 __ jmp(GetReturnLabel());
149 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000150 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100151 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 }
153
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 Label* GetReturnLabel() {
155 DCHECK(successor_ == nullptr);
156 return &return_label_;
157 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000158
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100159 HBasicBlock* GetSuccessor() const {
160 return successor_;
161 }
162
Alexandre Rames9931f312015-06-19 14:47:01 +0100163 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
164
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000165 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100166 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000167 Label return_label_;
168
169 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
170};
171
Andreas Gampe85b62f22015-09-09 13:15:38 -0700172class BoundsCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100173 public:
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100174 explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000175 : SlowPathCode(instruction) {}
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100176
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000177 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100178 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000179 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100180 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000181 if (instruction_->CanThrowIntoCatchBlock()) {
182 // Live registers will be restored in the catch block if caught.
183 SaveLiveRegisters(codegen, instruction_->GetLocations());
184 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400185 // Are we using an array length from memory?
186 HInstruction* array_length = instruction_->InputAt(1);
187 Location length_loc = locations->InAt(1);
188 InvokeRuntimeCallingConvention calling_convention;
189 if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
190 // Load the array length into our temporary.
191 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
192 Location array_loc = array_length->GetLocations()->InAt(0);
193 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
194 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
195 // Check for conflicts with index.
196 if (length_loc.Equals(locations->InAt(0))) {
197 // We know we aren't using parameter 2.
198 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
199 }
200 __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
jessicahandojo4877b792016-09-08 19:49:13 -0700201 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +0100202 __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -0700203 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400204 }
205
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000206 // We're moving two locations to locations that could overlap, so we need a parallel
207 // move resolver.
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000208 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100209 locations->InAt(0),
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000210 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Nicolas Geoffray90218252015-04-15 11:56:51 +0100211 Primitive::kPrimInt,
Mark Mendellee8d9712016-07-12 11:13:15 -0400212 length_loc,
Nicolas Geoffray90218252015-04-15 11:56:51 +0100213 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
214 Primitive::kPrimInt);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100215 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
216 ? kQuickThrowStringBounds
217 : kQuickThrowArrayBounds;
218 x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100219 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Roland Levillain888d0672015-11-23 18:53:50 +0000220 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100221 }
222
Alexandre Rames8158f282015-08-07 10:26:17 +0100223 bool IsFatal() const OVERRIDE { return true; }
224
Alexandre Rames9931f312015-06-19 14:47:01 +0100225 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }
226
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100227 private:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100228 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
229};
230
Andreas Gampe85b62f22015-09-09 13:15:38 -0700231class LoadClassSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100232 public:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000233 LoadClassSlowPathX86_64(HLoadClass* cls,
234 HInstruction* at,
235 uint32_t dex_pc,
236 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000237 : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000238 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
239 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100240
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000241 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000242 LocationSummary* locations = at_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000243 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100244 __ Bind(GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100245
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000246 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000247
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100248 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -0800249 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
250 Immediate(cls_->GetTypeIndex().index_));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100251 x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000252 at_,
253 dex_pc_,
254 this);
Roland Levillain888d0672015-11-23 18:53:50 +0000255 if (do_clinit_) {
256 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
257 } else {
258 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
259 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100260
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000261 Location out = locations->Out();
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000262 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000263 if (out.IsValid()) {
264 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Roland Levillain0d5a2812015-11-13 10:07:31 +0000265 x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000266 }
267
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000268 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100269 __ jmp(GetExitLabel());
270 }
271
Alexandre Rames9931f312015-06-19 14:47:01 +0100272 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }
273
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100274 private:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000275 // The class this slow path will load.
276 HLoadClass* const cls_;
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100277
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000278 // The instruction where this slow path is happening.
279 // (Might be the load class or an initialization check).
280 HInstruction* const at_;
281
282 // The dex PC of `at_`.
283 const uint32_t dex_pc_;
284
285 // Whether to initialize the class.
286 const bool do_clinit_;
287
288 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100289};
290
Vladimir Markoaad75c62016-10-03 08:46:48 +0000291class LoadStringSlowPathX86_64 : public SlowPathCode {
292 public:
293 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
294
295 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
296 LocationSummary* locations = instruction_->GetLocations();
297 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
298
299 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
300 __ Bind(GetEntryLabel());
301 SaveLiveRegisters(codegen, locations);
302
Andreas Gampe8a0128a2016-11-28 07:38:35 -0800303 const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100304 // Custom calling convention: RAX serves as both input and output.
305 __ movl(CpuRegister(RAX), Immediate(string_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000306 x86_64_codegen->InvokeRuntime(kQuickResolveString,
307 instruction_,
308 instruction_->GetDexPc(),
309 this);
310 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
311 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
312 RestoreLiveRegisters(codegen, locations);
313
314 // Store the resolved String to the BSS entry.
315 __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
316 locations->Out().AsRegister<CpuRegister>());
317 Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
318 __ Bind(fixup_label);
319
320 __ jmp(GetExitLabel());
321 }
322
323 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }
324
325 private:
326 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
327};
328
Andreas Gampe85b62f22015-09-09 13:15:38 -0700329class TypeCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000330 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000331 TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
David Srbecky9cd6d372016-02-09 15:24:47 +0000332 : SlowPathCode(instruction), is_fatal_(is_fatal) {}
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000333
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000334 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000335 LocationSummary* locations = instruction_->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100336 uint32_t dex_pc = instruction_->GetDexPc();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000337 DCHECK(instruction_->IsCheckCast()
338 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000339
Roland Levillain0d5a2812015-11-13 10:07:31 +0000340 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000341 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000342
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000343 if (!is_fatal_) {
344 SaveLiveRegisters(codegen, locations);
345 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000346
347 // We're moving two locations to locations that could overlap, so we need a parallel
348 // move resolver.
349 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800350 codegen->EmitParallelMoves(locations->InAt(0),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800351 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
352 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800353 locations->InAt(1),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800354 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
355 Primitive::kPrimNot);
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000356 if (instruction_->IsInstanceOf()) {
Serban Constantinescuba45db02016-07-12 22:53:02 +0100357 x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800358 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000359 } else {
360 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800361 x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
362 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000363 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000364
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000365 if (!is_fatal_) {
366 if (instruction_->IsInstanceOf()) {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000367 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000368 }
Nicolas Geoffray75374372015-09-17 17:12:19 +0000369
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000370 RestoreLiveRegisters(codegen, locations);
371 __ jmp(GetExitLabel());
372 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000373 }
374
Alexandre Rames9931f312015-06-19 14:47:01 +0100375 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }
376
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000377 bool IsFatal() const OVERRIDE { return is_fatal_; }
378
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000379 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000380 const bool is_fatal_;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000381
382 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
383};
384
Andreas Gampe85b62f22015-09-09 13:15:38 -0700385class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700386 public:
Aart Bik42249c32016-01-07 15:33:50 -0800387 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000388 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700389
390 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000391 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700392 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100393 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000394 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700395 }
396
Alexandre Rames9931f312015-06-19 14:47:01 +0100397 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
398
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700400 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
401};
402
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100403class ArraySetSlowPathX86_64 : public SlowPathCode {
404 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000405 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100406
407 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
408 LocationSummary* locations = instruction_->GetLocations();
409 __ Bind(GetEntryLabel());
410 SaveLiveRegisters(codegen, locations);
411
412 InvokeRuntimeCallingConvention calling_convention;
413 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
414 parallel_move.AddMove(
415 locations->InAt(0),
416 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
417 Primitive::kPrimNot,
418 nullptr);
419 parallel_move.AddMove(
420 locations->InAt(1),
421 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
422 Primitive::kPrimInt,
423 nullptr);
424 parallel_move.AddMove(
425 locations->InAt(2),
426 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
427 Primitive::kPrimNot,
428 nullptr);
429 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
430
Roland Levillain0d5a2812015-11-13 10:07:31 +0000431 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100432 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000433 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100434 RestoreLiveRegisters(codegen, locations);
435 __ jmp(GetExitLabel());
436 }
437
438 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
439
440 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100441 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
442};
443
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100444// Slow path marking an object reference `ref` during a read
445// barrier. The field `obj.field` in the object `obj` holding this
446// reference does not get updated by this slow path after marking (see
447// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
448//
449// This means that after the execution of this slow path, `ref` will
450// always be up-to-date, but `obj.field` may not; i.e., after the
451// flip, `ref` will be a to-space reference, but `obj.field` will
452// probably still be a from-space reference (unless it gets updated by
453// another thread, or if another thread installed another object
454// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000455class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
456 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100457 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
458 Location ref,
459 bool unpoison_ref_before_marking)
460 : SlowPathCode(instruction),
461 ref_(ref),
462 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000463 DCHECK(kEmitCompilerReadBarrier);
464 }
465
466 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }
467
468 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
469 LocationSummary* locations = instruction_->GetLocations();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100470 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
471 Register ref_reg = ref_cpu_reg.AsRegister();
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000472 DCHECK(locations->CanCall());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100473 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000474 DCHECK(instruction_->IsInstanceFieldGet() ||
475 instruction_->IsStaticFieldGet() ||
476 instruction_->IsArrayGet() ||
Roland Levillain16d9f942016-08-25 17:27:56 +0100477 instruction_->IsArraySet() ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000478 instruction_->IsLoadClass() ||
479 instruction_->IsLoadString() ||
480 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100481 instruction_->IsCheckCast() ||
Roland Levillain0b671c02016-08-19 12:02:34 +0100482 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
483 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000484 << "Unexpected instruction in read barrier marking slow path: "
485 << instruction_->DebugName();
486
487 __ Bind(GetEntryLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100488 if (unpoison_ref_before_marking_) {
Vladimir Marko953437b2016-08-24 08:30:46 +0000489 // Object* ref = ref_addr->AsMirrorPtr()
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100490 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
Vladimir Marko953437b2016-08-24 08:30:46 +0000491 }
Roland Levillain4359e612016-07-20 11:32:19 +0100492 // No need to save live registers; it's taken care of by the
493 // entrypoint. Also, there is no need to update the stack mask,
494 // as this runtime call will not trigger a garbage collection.
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000495 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100496 DCHECK_NE(ref_reg, RSP);
497 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
Roland Levillain02b75802016-07-13 11:54:35 +0100498 // "Compact" slow path, saving two moves.
499 //
500 // Instead of using the standard runtime calling convention (input
501 // and output in R0):
502 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100503 // RDI <- ref
Roland Levillain02b75802016-07-13 11:54:35 +0100504 // RAX <- ReadBarrierMark(RDI)
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100505 // ref <- RAX
Roland Levillain02b75802016-07-13 11:54:35 +0100506 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100507 // we just use rX (the register containing `ref`) as input and output
Roland Levillain02b75802016-07-13 11:54:35 +0100508 // of a dedicated entrypoint:
509 //
510 // rX <- ReadBarrierMarkRegX(rX)
511 //
512 int32_t entry_point_offset =
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100513 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
Roland Levillaindec8f632016-07-22 17:10:06 +0100514 // This runtime call does not require a stack map.
515 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000516 __ jmp(GetExitLabel());
517 }
518
519 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100520 // The location (register) of the marked object reference.
521 const Location ref_;
522 // Should the reference in `ref_` be unpoisoned prior to marking it?
523 const bool unpoison_ref_before_marking_;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000524
525 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
526};
527
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` must be a register location. `field_addr` is the address of the
  // field to update; its base must be `obj`. `temp1` holds the old reference
  // across the marking runtime call; `temp2` is used to preserve RAX around
  // the LOCK CMPXCHG below.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    // `ref_reg` must not be live across the call: the entrypoint updates it in place.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference, so it can be compared against
    // the marked reference after the runtime call to decide whether the
    // holder's field needs updating.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    // Atomically replace `*field_addr` with the marked reference iff it
    // still holds the old reference (now poisoned, if poisoning is on).
    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it will be overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers; see the constructor comment for their roles.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
699
Roland Levillain0d5a2812015-11-13 10:07:31 +0000700// Slow path generating a read barrier for a heap reference.
701class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
702 public:
703 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
704 Location out,
705 Location ref,
706 Location obj,
707 uint32_t offset,
708 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000709 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000710 out_(out),
711 ref_(ref),
712 obj_(obj),
713 offset_(offset),
714 index_(index) {
715 DCHECK(kEmitCompilerReadBarrier);
716 // If `obj` is equal to `out` or `ref`, it means the initial
717 // object has been overwritten by (or after) the heap object
718 // reference load to be instrumented, e.g.:
719 //
720 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000721 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000722 //
723 // In that case, we have lost the information about the original
724 // object, and the emitted read barrier cannot work properly.
725 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
726 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
727}
728
729 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
730 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
731 LocationSummary* locations = instruction_->GetLocations();
732 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
733 DCHECK(locations->CanCall());
734 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100735 DCHECK(instruction_->IsInstanceFieldGet() ||
736 instruction_->IsStaticFieldGet() ||
737 instruction_->IsArrayGet() ||
738 instruction_->IsInstanceOf() ||
739 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100740 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000741 << "Unexpected instruction in read barrier for heap reference slow path: "
742 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000743
744 __ Bind(GetEntryLabel());
745 SaveLiveRegisters(codegen, locations);
746
747 // We may have to change the index's value, but as `index_` is a
748 // constant member (like other "inputs" of this slow path),
749 // introduce a copy of it, `index`.
750 Location index = index_;
751 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100752 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000753 if (instruction_->IsArrayGet()) {
754 // Compute real offset and store it in index_.
755 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
756 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
757 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
758 // We are about to change the value of `index_reg` (see the
759 // calls to art::x86_64::X86_64Assembler::shll and
760 // art::x86_64::X86_64Assembler::AddImmediate below), but it
761 // has not been saved by the previous call to
762 // art::SlowPathCode::SaveLiveRegisters, as it is a
763 // callee-save register --
764 // art::SlowPathCode::SaveLiveRegisters does not consider
765 // callee-save registers, as it has been designed with the
766 // assumption that callee-save registers are supposed to be
767 // handled by the called function. So, as a callee-save
768 // register, `index_reg` _would_ eventually be saved onto
769 // the stack, but it would be too late: we would have
770 // changed its value earlier. Therefore, we manually save
771 // it here into another freely available register,
772 // `free_reg`, chosen of course among the caller-save
773 // registers (as a callee-save `free_reg` register would
774 // exhibit the same problem).
775 //
776 // Note we could have requested a temporary register from
777 // the register allocator instead; but we prefer not to, as
778 // this is a slow path, and we know we can find a
779 // caller-save register that is available.
780 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
781 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
782 index_reg = free_reg;
783 index = Location::RegisterLocation(index_reg);
784 } else {
785 // The initial register stored in `index_` has already been
786 // saved in the call to art::SlowPathCode::SaveLiveRegisters
787 // (as it is not a callee-save register), so we can freely
788 // use it.
789 }
790 // Shifting the index value contained in `index_reg` by the
791 // scale factor (2) cannot overflow in practice, as the
792 // runtime is unable to allocate object arrays with a size
793 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
794 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
795 static_assert(
796 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
797 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
798 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
799 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100800 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
801 // intrinsics, `index_` is not shifted by a scale factor of 2
802 // (as in the case of ArrayGet), as it is actually an offset
803 // to an object field within an object.
804 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000805 DCHECK(instruction_->GetLocations()->Intrinsified());
806 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
807 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
808 << instruction_->AsInvoke()->GetIntrinsic();
809 DCHECK_EQ(offset_, 0U);
810 DCHECK(index_.IsRegister());
811 }
812 }
813
814 // We're moving two or three locations to locations that could
815 // overlap, so we need a parallel move resolver.
816 InvokeRuntimeCallingConvention calling_convention;
817 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
818 parallel_move.AddMove(ref_,
819 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
820 Primitive::kPrimNot,
821 nullptr);
822 parallel_move.AddMove(obj_,
823 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
824 Primitive::kPrimNot,
825 nullptr);
826 if (index.IsValid()) {
827 parallel_move.AddMove(index,
828 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
829 Primitive::kPrimInt,
830 nullptr);
831 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
832 } else {
833 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
834 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
835 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100836 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000837 instruction_,
838 instruction_->GetDexPc(),
839 this);
840 CheckEntrypointTypes<
841 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
842 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
843
844 RestoreLiveRegisters(codegen, locations);
845 __ jmp(GetExitLabel());
846 }
847
848 const char* GetDescription() const OVERRIDE {
849 return "ReadBarrierForHeapReferenceSlowPathX86_64";
850 }
851
852 private:
853 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
854 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
855 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
856 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
857 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
858 return static_cast<CpuRegister>(i);
859 }
860 }
861 // We shall never fail to find a free caller-save register, as
862 // there are more than two core caller-save registers on x86-64
863 // (meaning it is possible to find one which is different from
864 // `ref` and `obj`).
865 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
866 LOG(FATAL) << "Could not find a free caller-save register";
867 UNREACHABLE();
868 }
869
Roland Levillain0d5a2812015-11-13 10:07:31 +0000870 const Location out_;
871 const Location ref_;
872 const Location obj_;
873 const uint32_t offset_;
874 // An additional location containing an index to an array.
875 // Only used for HArrayGet and the UnsafeGetObject &
876 // UnsafeGetObjectVolatile intrinsics.
877 const Location index_;
878
879 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
880};
881
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the result of the read barrier; `root` is the GC root
  // that was loaded (only HLoadClass / HLoadString produce such roots here).
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass `root_` as the single argument and call the runtime; the result
    // comes back in RAX and is moved into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // The location receiving the to-space reference.
  const Location out_;
  // The location of the GC root to pass to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
923
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100924#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100925// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
926#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100927
Roland Levillain4fa13f62015-07-06 18:11:54 +0100928inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700929 switch (cond) {
930 case kCondEQ: return kEqual;
931 case kCondNE: return kNotEqual;
932 case kCondLT: return kLess;
933 case kCondLE: return kLessEqual;
934 case kCondGT: return kGreater;
935 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700936 case kCondB: return kBelow;
937 case kCondBE: return kBelowEqual;
938 case kCondA: return kAbove;
939 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700940 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100941 LOG(FATAL) << "Unreachable";
942 UNREACHABLE();
943}
944
Aart Bike9f37602015-10-09 11:15:55 -0700945// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100946inline Condition X86_64FPCondition(IfCondition cond) {
947 switch (cond) {
948 case kCondEQ: return kEqual;
949 case kCondNE: return kNotEqual;
950 case kCondLT: return kBelow;
951 case kCondLE: return kBelowEqual;
952 case kCondGT: return kAbove;
953 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700954 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100955 };
956 LOG(FATAL) << "Unreachable";
957 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700958}
959
// Returns the dispatch info actually supported for a static/direct invoke.
// On x86-64, every desired dispatch kind is supported, so the request is
// returned unchanged (the `invoke` parameter is unused).
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  return desired_dispatch_info;
}
965
// Materializes the callee ArtMethod* for a static/direct invoke according to
// the invoke's method load kind, and returns the location holding it
// (`temp` for every kind except kRecursive, which reuses the input location).
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available in an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method's address is known at compile time; load it as an immediate.
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Emit a PC-relative load with a dummy offset, to be fixed up by the linker.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      // Resolve the callee through the current method's dex cache.
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invoke: reload the current method from the stack.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
1017
// Emits the code for a static or direct call: loads the callee method and
// then calls it according to the invoke's code pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: jump straight back to this method's frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
1037
// Emits the code for a virtual call: loads the receiver's class, fetches the
// target ArtMethod* from the embedded vtable, and calls its entry point.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The class load above doubles as the implicit null check on the receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
1068
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001069void CodeGeneratorX86_64::RecordSimplePatch() {
1070 if (GetCompilerOptions().GetIncludePatchInformation()) {
1071 simple_patches_.emplace_back();
1072 __ Bind(&simple_patches_.back());
1073 }
1074}
1075
Vladimir Markoaad75c62016-10-03 08:46:48 +00001076void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
1077 DCHECK(GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001078 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001079 __ Bind(&string_patches_.back().label);
1080}
1081
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001082void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08001083 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001084 __ Bind(&type_patches_.back().label);
1085}
1086
Vladimir Markoaad75c62016-10-03 08:46:48 +00001087Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1088 DCHECK(!GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001089 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001090 return &string_patches_.back().label;
1091}
1092
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001093Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1094 uint32_t element_offset) {
1095 // Add a patch entry and return the label.
1096 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1097 return &pc_relative_dex_cache_patches_.back().label;
1098}
1099
// The label points to the end of the "movl" (or another) instruction, but the
// literal offset for a method patch needs to point to the embedded constant,
// which occupies the last 4 bytes of that instruction.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1103
1104template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1105inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1106 const ArenaDeque<PatchInfo<Label>>& infos,
1107 ArenaVector<LinkerPatch>* linker_patches) {
1108 for (const PatchInfo<Label>& info : infos) {
1109 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1110 linker_patches->push_back(
1111 Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
1112 }
1113}
1114
// Collects all patches recorded during code generation into `linker_patches`
// for the linker to apply: dex cache array accesses, simple record positions,
// string patches (boot-image relative or .bss entry) and type patches.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve space up front to avoid reallocation while appending.
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  // String patches are .bss entries in app images and PC-relative references
  // in the boot image (see RecordBootStringPatch / NewStringBssEntryPatch).
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  } else {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  }
  // These are always PC-relative, see GetSupportedLoadClassKind().
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
}
1138
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001139void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001140 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001141}
1142
1143void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001144 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001145}
1146
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001147size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1148 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1149 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001150}
1151
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001152size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1153 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1154 return kX86_64WordSize;
1155}
1156
1157size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1158 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1159 return kX86_64WordSize;
1160}
1161
1162size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1163 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1164 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001165}
1166
// Emits a call to the given quick runtime entrypoint (resolved to a
// thread-local offset) and, when the entrypoint requires one, records a
// stack map at `dex_pc` for `instruction`.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  // The stack map must be recorded at the PC right after the call.
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1177
// Calls a runtime entry point at a raw thread-local offset without recording
// a stack map; legality of the call site is checked by
// ValidateInvokeRuntimeWithoutRecordingPcInfo.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1184
// Emits the actual entrypoint call: an indirect call through the GS segment
// (thread-relative addressing) at `entry_point_offset`, without RIP-relative
// addressing.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1188
// x86-64 needs no register pairs: 64-bit core registers hold long values.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The core callee-save mask includes
// the fake return-address register so frame layout accounts for the pushed
// return address. All patch/fixup containers use the graph's arena.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
      : CodeGenerator(graph,
                      kNumberOfCpuRegisters,
                      kNumberOfFloatRegisters,
                      kNumberOfCpuRegisterPairs,
                      ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                          arraysize(kCoreCalleeSaves))
                          | (1 << kFakeReturnRegister),
                      ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                          arraysize(kFpuCalleeSaves)),
                      compiler_options,
                      stats),
        block_labels_(nullptr),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this),
        assembler_(graph->GetArena()),
        isa_features_(isa_features),
        constant_area_start_(0),
        pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Make the fake return-address register visible to the register allocator
  // bookkeeping so its slot is reserved.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001223
// The instruction visitor caches the assembler and code generator so every
// Visit* method can emit code directly.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1229
// Marks registers the register allocator must never hand out: the stack
// pointer and the scratch register reserved for code generation.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}
1237
// Maps a core register number to its DWARF encoding for CFI directives.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001241
// Maps an XMM register number to its DWARF encoding for CFI directives.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1245
// Emits the method prologue: implicit stack overflow probe, callee-save
// core/XMM spills, frame allocation, optional should_deoptimize flag
// initialization, and storing of the current ArtMethod — each stack change
// paired with a matching CFI directive.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  // Only implicit (fault-based) overflow checks are supported here.
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Probe the reserved region below RSP; RecordPcInfo ties a fault at this
    // PC back to this method.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated callee-save core registers in reverse declaration order so
  // the epilogue's forward-order pops restore them correctly.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the remainder of the frame in a single adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill allocated callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), xmm_spill_location - kShouldDeoptimizeFlagSize),
            Immediate(0));
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1300
// Emits the method epilogue, mirroring GenerateFrameEntry: restore XMM
// callee-saves, deallocate the frame, pop core callee-saves, return. CFI
// state is remembered before and restored after so unwind info for code
// emitted after the `ret` (other blocks) stays correct.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    // Reload callee-save XMM registers from their frame slots.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Deallocate the frame portion allocated by subq in the prologue.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-save core registers in forward order (pushed in reverse).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1331
// Binds `block`'s label at the current assembler position so branches to the
// block can be resolved.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1335
// Emits a move between two arbitrary locations (register, XMM, stack slot,
// double stack slot, or constant), selecting 32- vs 64-bit instructions from
// the location kinds. Stack-to-stack moves go through the reserved TMP
// register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // XMM -> core register transfer.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      // 32-bit stack slot load.
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // 64-bit stack slot load.
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      // Core register -> XMM transfer.
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Materialize the constant's bit pattern directly in the XMM register.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // 32-bit stack destination.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 32-bit constants can be stored with an immediate move.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    // 64-bit stack destination.
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      // 64-bit immediates cannot be stored directly; helper handles it.
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: bounce through TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1415
// Materializes the 32-bit constant `value` (sign-extended to 64 bits) into
// the register denoted by `location`.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1420
// Forwards to Move(); on x86-64 the instruction choice is derived from the
// location kinds, so the destination type is not needed.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1425
1426void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1427 if (location.IsRegister()) {
1428 locations->AddTemp(location);
1429 } else {
1430 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1431 }
1432}
1433
// Emits the control transfer for an unconditional branch (HGoto or
// HTryBoundary), inserting suspend checks at loop back edges and after the
// entry block, and eliding the jump when the successor is the next block.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the suspend-check codegen also emits
    // the branch to `successor`.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1453
// HGoto consumes no values and produces none: no location summary needed.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1457
// Code generation for HGoto delegates to the shared HandleGoto helper.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1461
// HTryBoundary consumes no values and produces none: no location summary.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1465
1466void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1467 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1468 if (!successor->IsExitBlock()) {
1469 HandleGoto(try_boundary, successor);
1470 }
1471}
1472
// HExit consumes no values and produces none: no location summary needed.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1476
// No code is generated for the exit block itself.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1479
// Emits the jumps for a floating-point comparison whose flags were set by
// ucomiss/ucomisd (see GenerateCompareTest). A NaN operand sets the
// "unordered" flags, so the unordered case must be dispatched first for
// conditions whose result is fixed when an input is NaN.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1491
// Emits the comparison for `condition`, leaving the result in the condition
// codes (EFLAGS) for a subsequent conditional jump or set. Integral types use
// the shared int/long compare helpers; FP types use ucomiss/ucomisd with
// register, constant-pool, or stack operands.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // All 32-bit (and reference) compares share one helper.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant placed in the literal area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant placed in the literal area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1544
// Emits a compare followed by the branch(es) for a non-materialized long or
// FP condition. Long conditions need a single conditional jump; FP
// conditions go through GenerateFPJumps for NaN handling.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case Primitive::kPrimFloat: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case Primitive::kPrimDouble: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  // Only jump to the false target if it is not the fallthrough.
  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  // Bind the fallthrough label here if any of the jumps above used it.
  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1585
David Brazdil0debae72015-11-12 18:37:00 +00001586static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1587 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1588 // are set only strictly before `branch`. We can't use the eflags on long
1589 // conditions if they are materialized due to the complex branching.
1590 return cond->IsCondition() &&
1591 cond->GetNext() == branch &&
1592 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1593}
1594
// Emits the test-and-branch sequence for `instruction` whose condition is
// input `condition_input_index`. A null target means "fall through". Handles
// constant conditions, reuse of already-set EFLAGS, materialized booleans
// (compare against 0), and non-materialized conditions (compare emitted
// inline, or delegated for long/FP types).
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // Reuse the flags set by the condition instruction itself.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1678
// An HIf needs an input location only when its condition is a boolean value
// or a materialized condition; otherwise the condition's own locations are
// consumed by the branch codegen.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1685
1686void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001687 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1688 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1689 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1690 nullptr : codegen_->GetLabelOf(true_successor);
1691 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1692 nullptr : codegen_->GetLabelOf(false_successor);
1693 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001694}
1695
// HDeoptimize calls into the runtime on a slow path; the condition input is
// needed only when it is a boolean value or a materialized condition.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1704
// Branches to a deoptimization slow path when input 0's condition holds;
// otherwise falls through (no false target).
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1712
// HShouldDeoptimizeFlag produces its value in a register and makes no calls.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
1718
1719void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1720 __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
1721 Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1722}
1723
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001724static bool SelectCanUseCMOV(HSelect* select) {
1725 // There are no conditional move instructions for XMMs.
1726 if (Primitive::IsFloatingPointType(select->GetType())) {
1727 return false;
1728 }
1729
1730 // A FP condition doesn't generate the single CC that we need.
1731 HInstruction* condition = select->GetCondition();
1732 if (condition->IsCondition() &&
1733 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1734 return false;
1735 }
1736
1737 // We can generate a CMOV for this Select.
1738 return true;
1739}
1740
David Brazdil74eb1b22015-12-14 11:44:01 +00001741void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1742 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1743 if (Primitive::IsFloatingPointType(select->GetType())) {
1744 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001745 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001746 } else {
1747 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001748 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001749 if (select->InputAt(1)->IsConstant()) {
1750 locations->SetInAt(1, Location::RequiresRegister());
1751 } else {
1752 locations->SetInAt(1, Location::Any());
1753 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001754 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001755 locations->SetInAt(1, Location::Any());
1756 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001757 }
1758 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1759 locations->SetInAt(2, Location::RequiresRegister());
1760 }
1761 locations->SetOut(Location::SameAsFirstInput());
1762}
1763
// Lowers Select either to a CMOV (integer value and integer condition) or to
// a test-and-branch around a move. The instruction order is significant: the
// condition-producing instruction must immediately precede the CMOV so that
// EFLAGS are still valid.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    // The output shares the register holding the false value (SameAsFirstInput).
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted at use site: regenerate the compare here.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // True value lives on the stack: CMOV with a memory source operand.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the true value when the condition is
    // false. The output already holds the false value (SameAsFirstInput).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1820
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs or outputs; the instruction only marks a source position.
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1828
// Emits a single one-byte x86 NOP instruction.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1832
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001833void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001834 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001835 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001836 // Handle the long/FP comparisons made in instruction simplification.
1837 switch (cond->InputAt(0)->GetType()) {
1838 case Primitive::kPrimLong:
1839 locations->SetInAt(0, Location::RequiresRegister());
1840 locations->SetInAt(1, Location::Any());
1841 break;
1842 case Primitive::kPrimFloat:
1843 case Primitive::kPrimDouble:
1844 locations->SetInAt(0, Location::RequiresFpuRegister());
1845 locations->SetInAt(1, Location::Any());
1846 break;
1847 default:
1848 locations->SetInAt(0, Location::RequiresRegister());
1849 locations->SetInAt(1, Location::Any());
1850 break;
1851 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001852 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001853 locations->SetOut(Location::RequiresRegister());
1854 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001855}
1856
// Materializes a condition into a register (0 or 1). Integer/long cases use
// compare + setcc; FP cases compare with ucomiss/ucomisd and branch to pick
// the result. Instruction order matters: nothing may clobber EFLAGS between
// the compare and the setcc/jumps.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    // The user regenerates the compare itself; no materialization needed.
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      // (xorl changes EFLAGS, so it must precede the compare.)
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      // Compare against a constant pool literal, a stack slot, or a register.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1926
// All condition visitors (signed, unsigned, and equality comparisons) share
// a single implementation: HandleCondition builds the locations and emits
// the materialized 0/1 result when needed.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

// Unsigned comparisons (Below/Above) are handled identically; the
// signed/unsigned distinction is resolved by X86_64IntegerCondition.
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2006
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // kNoOutputOverlap: the output is only written after the compare has
      // consumed both inputs, so it may share a register with them.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // FP inputs live in XMM registers; the -1/0/1 result is an integer.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2033
// Emits the three-way compare: out = -1 if left < right, 0 if equal,
// 1 if greater. For FP, NaN is resolved to 1 or -1 depending on the
// compare's gt-bias. The movl writing 0 does not modify EFLAGS, so the
// conditional jumps after it still see the compare's flags.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      // Right operand may be a constant-pool literal, a stack slot, or a register.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: gt-bias picks 1, otherwise -1.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Fan the flags out into -1/0/1. movl does not touch EFLAGS.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2102
// Constants produce no code of their own: each is given a constant location
// and materialized only at its use sites.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2153
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // No inputs or outputs.
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier matching the requested kind (load/store/any).
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2161
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  // No inputs or outputs.
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
}
2169
// Pins the return value to the ABI return register: RAX for integral and
// reference types, XMM0 for floating-point types.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2193
// The register allocator already placed the return value in RAX/XMM0 (see
// the locations builder above), so this only verifies that in debug builds
// and emits the frame exit.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2219
// Maps a return type to its calling-convention location: RAX for integral
// and reference types, XMM0 for floating-point, nothing for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above is exhaustive over Primitive::Type.
  UNREACHABLE();
}
2241
// The callee ArtMethod* is passed in the dedicated method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2245
// Assigns the location of the next argument of the given type, consuming
// the visitor's register/stack cursors. GP and FP arguments draw from
// separate register pools (gp_index_ / float_index_), while stack_index_
// advances for every argument (by 2 for 64-bit types) so that stack-passed
// arguments land in the right slots once registers are exhausted.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Out of registers: use the stack slot reserved above.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // A long fits in a single 64-bit GP register.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // NOTE: gp_index_ advances by 2 here even though no register is
        // used; this matches the double-width stack reservation.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2301
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Resolution and dispatch are delegated to a runtime trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2312
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // If this call matches a known intrinsic, let the intrinsic builder set up
  // its specialized locations instead of the generic invoke convention.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2325
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002326static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2327 if (invoke->GetLocations()->Intrinsified()) {
2328 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2329 intrinsic.Dispatch(invoke);
2330 return true;
2331 }
2332 return false;
2333}
2334
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // Intrinsified invokes emit their own specialized code instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  // The first temp (when present) receives the target method.
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2349
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002350void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002351 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002352 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002353}
2354
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002355void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002356 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002357 if (intrinsic.TryDispatch(invoke)) {
2358 return;
2359 }
2360
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002361 HandleInvoke(invoke);
2362}
2363
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002364void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002365 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2366 return;
2367 }
2368
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002369 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002370 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002371 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002372}
2373
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002374void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2375 HandleInvoke(invoke);
2376 // Add the hidden argument.
2377 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2378}
2379
// Emits the code for an interface call: loads the receiver's class, walks its
// IMT to the conflict-table entry for this method, and calls its quick entry
// point, with the dex method index passed as a hidden argument in RAX.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. It is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // Load the receiver's class pointer into `temp` (spilled receivers are
  // reloaded from the stack first).
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the instruction that faults on a null receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // method_offset = byte offset of this method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2425
Roland Levillain88cb1752014-10-20 16:36:47 +01002426void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2427 LocationSummary* locations =
2428 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2429 switch (neg->GetResultType()) {
2430 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002431 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002432 locations->SetInAt(0, Location::RequiresRegister());
2433 locations->SetOut(Location::SameAsFirstInput());
2434 break;
2435
Roland Levillain88cb1752014-10-20 16:36:47 +01002436 case Primitive::kPrimFloat:
2437 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002438 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002439 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002440 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002441 break;
2442
2443 default:
2444 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2445 }
2446}
2447
2448void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2449 LocationSummary* locations = neg->GetLocations();
2450 Location out = locations->Out();
2451 Location in = locations->InAt(0);
2452 switch (neg->GetResultType()) {
2453 case Primitive::kPrimInt:
2454 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002455 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002456 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002457 break;
2458
2459 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002460 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002461 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002462 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002463 break;
2464
Roland Levillain5368c212014-11-27 15:03:41 +00002465 case Primitive::kPrimFloat: {
2466 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002467 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002468 // Implement float negation with an exclusive or with value
2469 // 0x80000000 (mask for bit 31, representing the sign of a
2470 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002471 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002472 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002473 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002474 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002475
Roland Levillain5368c212014-11-27 15:03:41 +00002476 case Primitive::kPrimDouble: {
2477 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002478 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002479 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002480 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002481 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002482 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002483 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002484 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002485 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002486
2487 default:
2488 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2489 }
2490}
2491
Roland Levillaindff1f282014-11-05 14:15:05 +00002492void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2493 LocationSummary* locations =
2494 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2495 Primitive::Type result_type = conversion->GetResultType();
2496 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002497 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002498
David Brazdilb2bd1c52015-03-25 11:17:37 +00002499 // The Java language does not allow treating boolean as an integral type but
2500 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002501
Roland Levillaindff1f282014-11-05 14:15:05 +00002502 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002503 case Primitive::kPrimByte:
2504 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002505 case Primitive::kPrimLong:
2506 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002507 case Primitive::kPrimBoolean:
2508 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002509 case Primitive::kPrimShort:
2510 case Primitive::kPrimInt:
2511 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002512 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002513 locations->SetInAt(0, Location::Any());
2514 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2515 break;
2516
2517 default:
2518 LOG(FATAL) << "Unexpected type conversion from " << input_type
2519 << " to " << result_type;
2520 }
2521 break;
2522
Roland Levillain01a8d712014-11-14 16:27:39 +00002523 case Primitive::kPrimShort:
2524 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002525 case Primitive::kPrimLong:
2526 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002527 case Primitive::kPrimBoolean:
2528 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002529 case Primitive::kPrimByte:
2530 case Primitive::kPrimInt:
2531 case Primitive::kPrimChar:
2532 // Processing a Dex `int-to-short' instruction.
2533 locations->SetInAt(0, Location::Any());
2534 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2535 break;
2536
2537 default:
2538 LOG(FATAL) << "Unexpected type conversion from " << input_type
2539 << " to " << result_type;
2540 }
2541 break;
2542
Roland Levillain946e1432014-11-11 17:35:19 +00002543 case Primitive::kPrimInt:
2544 switch (input_type) {
2545 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002546 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002547 locations->SetInAt(0, Location::Any());
2548 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2549 break;
2550
2551 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002552 // Processing a Dex `float-to-int' instruction.
2553 locations->SetInAt(0, Location::RequiresFpuRegister());
2554 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002555 break;
2556
Roland Levillain946e1432014-11-11 17:35:19 +00002557 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002558 // Processing a Dex `double-to-int' instruction.
2559 locations->SetInAt(0, Location::RequiresFpuRegister());
2560 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002561 break;
2562
2563 default:
2564 LOG(FATAL) << "Unexpected type conversion from " << input_type
2565 << " to " << result_type;
2566 }
2567 break;
2568
Roland Levillaindff1f282014-11-05 14:15:05 +00002569 case Primitive::kPrimLong:
2570 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002571 case Primitive::kPrimBoolean:
2572 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002573 case Primitive::kPrimByte:
2574 case Primitive::kPrimShort:
2575 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002576 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002577 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002578 // TODO: We would benefit from a (to-be-implemented)
2579 // Location::RegisterOrStackSlot requirement for this input.
2580 locations->SetInAt(0, Location::RequiresRegister());
2581 locations->SetOut(Location::RequiresRegister());
2582 break;
2583
2584 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002585 // Processing a Dex `float-to-long' instruction.
2586 locations->SetInAt(0, Location::RequiresFpuRegister());
2587 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002588 break;
2589
Roland Levillaindff1f282014-11-05 14:15:05 +00002590 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002591 // Processing a Dex `double-to-long' instruction.
2592 locations->SetInAt(0, Location::RequiresFpuRegister());
2593 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002594 break;
2595
2596 default:
2597 LOG(FATAL) << "Unexpected type conversion from " << input_type
2598 << " to " << result_type;
2599 }
2600 break;
2601
Roland Levillain981e4542014-11-14 11:47:14 +00002602 case Primitive::kPrimChar:
2603 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002604 case Primitive::kPrimLong:
2605 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002606 case Primitive::kPrimBoolean:
2607 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002608 case Primitive::kPrimByte:
2609 case Primitive::kPrimShort:
2610 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002611 // Processing a Dex `int-to-char' instruction.
2612 locations->SetInAt(0, Location::Any());
2613 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2614 break;
2615
2616 default:
2617 LOG(FATAL) << "Unexpected type conversion from " << input_type
2618 << " to " << result_type;
2619 }
2620 break;
2621
Roland Levillaindff1f282014-11-05 14:15:05 +00002622 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002623 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002624 case Primitive::kPrimBoolean:
2625 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002626 case Primitive::kPrimByte:
2627 case Primitive::kPrimShort:
2628 case Primitive::kPrimInt:
2629 case Primitive::kPrimChar:
2630 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002631 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002632 locations->SetOut(Location::RequiresFpuRegister());
2633 break;
2634
2635 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002636 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002637 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002638 locations->SetOut(Location::RequiresFpuRegister());
2639 break;
2640
Roland Levillaincff13742014-11-17 14:32:17 +00002641 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002642 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002643 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002644 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002645 break;
2646
2647 default:
2648 LOG(FATAL) << "Unexpected type conversion from " << input_type
2649 << " to " << result_type;
2650 };
2651 break;
2652
Roland Levillaindff1f282014-11-05 14:15:05 +00002653 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002654 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002655 case Primitive::kPrimBoolean:
2656 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002657 case Primitive::kPrimByte:
2658 case Primitive::kPrimShort:
2659 case Primitive::kPrimInt:
2660 case Primitive::kPrimChar:
2661 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002662 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002663 locations->SetOut(Location::RequiresFpuRegister());
2664 break;
2665
2666 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002667 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002668 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002669 locations->SetOut(Location::RequiresFpuRegister());
2670 break;
2671
Roland Levillaincff13742014-11-17 14:32:17 +00002672 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002673 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002674 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002675 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002676 break;
2677
2678 default:
2679 LOG(FATAL) << "Unexpected type conversion from " << input_type
2680 << " to " << result_type;
2681 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002682 break;
2683
2684 default:
2685 LOG(FATAL) << "Unexpected type conversion from " << input_type
2686 << " to " << result_type;
2687 }
2688}
2689
2690void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2691 LocationSummary* locations = conversion->GetLocations();
2692 Location out = locations->Out();
2693 Location in = locations->InAt(0);
2694 Primitive::Type result_type = conversion->GetResultType();
2695 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002696 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002697 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002698 case Primitive::kPrimByte:
2699 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002700 case Primitive::kPrimLong:
2701 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002702 case Primitive::kPrimBoolean:
2703 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002704 case Primitive::kPrimShort:
2705 case Primitive::kPrimInt:
2706 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002707 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002708 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002709 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002710 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002711 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002712 Address(CpuRegister(RSP), in.GetStackIndex()));
2713 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002714 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002715 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002716 }
2717 break;
2718
2719 default:
2720 LOG(FATAL) << "Unexpected type conversion from " << input_type
2721 << " to " << result_type;
2722 }
2723 break;
2724
Roland Levillain01a8d712014-11-14 16:27:39 +00002725 case Primitive::kPrimShort:
2726 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002727 case Primitive::kPrimLong:
2728 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002729 case Primitive::kPrimBoolean:
2730 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002731 case Primitive::kPrimByte:
2732 case Primitive::kPrimInt:
2733 case Primitive::kPrimChar:
2734 // Processing a Dex `int-to-short' instruction.
2735 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002736 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002737 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002738 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002739 Address(CpuRegister(RSP), in.GetStackIndex()));
2740 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002741 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002742 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002743 }
2744 break;
2745
2746 default:
2747 LOG(FATAL) << "Unexpected type conversion from " << input_type
2748 << " to " << result_type;
2749 }
2750 break;
2751
Roland Levillain946e1432014-11-11 17:35:19 +00002752 case Primitive::kPrimInt:
2753 switch (input_type) {
2754 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002755 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002756 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002757 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002758 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002759 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002760 Address(CpuRegister(RSP), in.GetStackIndex()));
2761 } else {
2762 DCHECK(in.IsConstant());
2763 DCHECK(in.GetConstant()->IsLongConstant());
2764 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002765 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002766 }
2767 break;
2768
Roland Levillain3f8f9362014-12-02 17:45:01 +00002769 case Primitive::kPrimFloat: {
2770 // Processing a Dex `float-to-int' instruction.
2771 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2772 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002773 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002774
2775 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002776 // if input >= (float)INT_MAX goto done
2777 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002778 __ j(kAboveEqual, &done);
2779 // if input == NaN goto nan
2780 __ j(kUnordered, &nan);
2781 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002782 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002783 __ jmp(&done);
2784 __ Bind(&nan);
2785 // output = 0
2786 __ xorl(output, output);
2787 __ Bind(&done);
2788 break;
2789 }
2790
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002791 case Primitive::kPrimDouble: {
2792 // Processing a Dex `double-to-int' instruction.
2793 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2794 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002795 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002796
2797 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002798 // if input >= (double)INT_MAX goto done
2799 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002800 __ j(kAboveEqual, &done);
2801 // if input == NaN goto nan
2802 __ j(kUnordered, &nan);
2803 // output = double-to-int-truncate(input)
2804 __ cvttsd2si(output, input);
2805 __ jmp(&done);
2806 __ Bind(&nan);
2807 // output = 0
2808 __ xorl(output, output);
2809 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002810 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002811 }
Roland Levillain946e1432014-11-11 17:35:19 +00002812
2813 default:
2814 LOG(FATAL) << "Unexpected type conversion from " << input_type
2815 << " to " << result_type;
2816 }
2817 break;
2818
Roland Levillaindff1f282014-11-05 14:15:05 +00002819 case Primitive::kPrimLong:
2820 switch (input_type) {
2821 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002822 case Primitive::kPrimBoolean:
2823 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002824 case Primitive::kPrimByte:
2825 case Primitive::kPrimShort:
2826 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002827 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002828 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002829 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002830 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002831 break;
2832
Roland Levillain624279f2014-12-04 11:54:28 +00002833 case Primitive::kPrimFloat: {
2834 // Processing a Dex `float-to-long' instruction.
2835 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2836 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002837 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002838
Mark Mendell92e83bf2015-05-07 11:25:03 -04002839 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002840 // if input >= (float)LONG_MAX goto done
2841 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002842 __ j(kAboveEqual, &done);
2843 // if input == NaN goto nan
2844 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002845 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002846 __ cvttss2si(output, input, true);
2847 __ jmp(&done);
2848 __ Bind(&nan);
2849 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002850 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002851 __ Bind(&done);
2852 break;
2853 }
2854
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002855 case Primitive::kPrimDouble: {
2856 // Processing a Dex `double-to-long' instruction.
2857 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2858 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002859 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002860
Mark Mendell92e83bf2015-05-07 11:25:03 -04002861 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002862 // if input >= (double)LONG_MAX goto done
2863 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002864 __ j(kAboveEqual, &done);
2865 // if input == NaN goto nan
2866 __ j(kUnordered, &nan);
2867 // output = double-to-long-truncate(input)
2868 __ cvttsd2si(output, input, true);
2869 __ jmp(&done);
2870 __ Bind(&nan);
2871 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002872 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002873 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002874 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002875 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002876
2877 default:
2878 LOG(FATAL) << "Unexpected type conversion from " << input_type
2879 << " to " << result_type;
2880 }
2881 break;
2882
Roland Levillain981e4542014-11-14 11:47:14 +00002883 case Primitive::kPrimChar:
2884 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002885 case Primitive::kPrimLong:
2886 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002887 case Primitive::kPrimBoolean:
2888 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002889 case Primitive::kPrimByte:
2890 case Primitive::kPrimShort:
2891 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002892 // Processing a Dex `int-to-char' instruction.
2893 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002894 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002895 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002896 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002897 Address(CpuRegister(RSP), in.GetStackIndex()));
2898 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002899 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002900 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002901 }
2902 break;
2903
2904 default:
2905 LOG(FATAL) << "Unexpected type conversion from " << input_type
2906 << " to " << result_type;
2907 }
2908 break;
2909
Roland Levillaindff1f282014-11-05 14:15:05 +00002910 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002911 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002912 case Primitive::kPrimBoolean:
2913 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002914 case Primitive::kPrimByte:
2915 case Primitive::kPrimShort:
2916 case Primitive::kPrimInt:
2917 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002918 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002919 if (in.IsRegister()) {
2920 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2921 } else if (in.IsConstant()) {
2922 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2923 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002924 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002925 } else {
2926 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2927 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2928 }
Roland Levillaincff13742014-11-17 14:32:17 +00002929 break;
2930
2931 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002932 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002933 if (in.IsRegister()) {
2934 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2935 } else if (in.IsConstant()) {
2936 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2937 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002938 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002939 } else {
2940 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2941 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2942 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002943 break;
2944
Roland Levillaincff13742014-11-17 14:32:17 +00002945 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002946 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002947 if (in.IsFpuRegister()) {
2948 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2949 } else if (in.IsConstant()) {
2950 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2951 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002952 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002953 } else {
2954 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2955 Address(CpuRegister(RSP), in.GetStackIndex()));
2956 }
Roland Levillaincff13742014-11-17 14:32:17 +00002957 break;
2958
2959 default:
2960 LOG(FATAL) << "Unexpected type conversion from " << input_type
2961 << " to " << result_type;
2962 };
2963 break;
2964
Roland Levillaindff1f282014-11-05 14:15:05 +00002965 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002966 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002967 case Primitive::kPrimBoolean:
2968 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002969 case Primitive::kPrimByte:
2970 case Primitive::kPrimShort:
2971 case Primitive::kPrimInt:
2972 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002973 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002974 if (in.IsRegister()) {
2975 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2976 } else if (in.IsConstant()) {
2977 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2978 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002979 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002980 } else {
2981 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2982 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2983 }
Roland Levillaincff13742014-11-17 14:32:17 +00002984 break;
2985
2986 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002987 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002988 if (in.IsRegister()) {
2989 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2990 } else if (in.IsConstant()) {
2991 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2992 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002993 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002994 } else {
2995 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2996 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2997 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002998 break;
2999
Roland Levillaincff13742014-11-17 14:32:17 +00003000 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003001 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003002 if (in.IsFpuRegister()) {
3003 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3004 } else if (in.IsConstant()) {
3005 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3006 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003007 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003008 } else {
3009 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3010 Address(CpuRegister(RSP), in.GetStackIndex()));
3011 }
Roland Levillaincff13742014-11-17 14:32:17 +00003012 break;
3013
3014 default:
3015 LOG(FATAL) << "Unexpected type conversion from " << input_type
3016 << " to " << result_type;
3017 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003018 break;
3019
3020 default:
3021 LOG(FATAL) << "Unexpected type conversion from " << input_type
3022 << " to " << result_type;
3023 }
3024}
3025
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003026void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003027 LocationSummary* locations =
3028 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003029 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003030 case Primitive::kPrimInt: {
3031 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003032 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3033 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003034 break;
3035 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003036
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003037 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003038 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003039 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003040 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003041 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003042 break;
3043 }
3044
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003045 case Primitive::kPrimDouble:
3046 case Primitive::kPrimFloat: {
3047 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003048 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003049 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003050 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003051 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003052
3053 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003054 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003055 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003056}
3057
// Emits code for an HAdd. Integer/long adds prefer a non-destructive
// leal/leaq when the output register differs from both inputs; FP adds are
// destructive (output aliases the first input, as arranged by VisitAdd above).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // Compare the underlying register codes to decide between a
        // two-operand addl (when out aliases an input) and a leal.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Addition is commutative, so add the first input into out.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out is distinct from both inputs: compute first + second without
          // clobbering either, using the address-generation unit.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // Non-destructive register + immediate via leal displacement.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: addl only supports the destructive form.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        // Same addq/leaq selection as the int case, on 64-bit registers.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The register allocator only gives us int32-representable constants
        // here (see RegisterOrInt32Constant in VisitAdd's locations).
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3149
3150void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003151 LocationSummary* locations =
3152 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003153 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003154 case Primitive::kPrimInt: {
3155 locations->SetInAt(0, Location::RequiresRegister());
3156 locations->SetInAt(1, Location::Any());
3157 locations->SetOut(Location::SameAsFirstInput());
3158 break;
3159 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003160 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003161 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003162 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003163 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003164 break;
3165 }
Calin Juravle11351682014-10-23 15:38:15 +01003166 case Primitive::kPrimFloat:
3167 case Primitive::kPrimDouble: {
3168 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003169 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003170 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003171 break;
Calin Juravle11351682014-10-23 15:38:15 +01003172 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003173 default:
Calin Juravle11351682014-10-23 15:38:15 +01003174 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003175 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003176}
3177
// Emits code for an HSub. All forms are destructive: the locations phase
// (VisitSub above) pinned the output to the first input.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Pick the subl form matching where the second operand lives:
      // register, immediate, or stack slot.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        // The locations phase only allows constants that fit in 32 bits.
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3240
Calin Juravle34bacdf2014-10-07 20:23:36 +01003241void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3242 LocationSummary* locations =
3243 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3244 switch (mul->GetResultType()) {
3245 case Primitive::kPrimInt: {
3246 locations->SetInAt(0, Location::RequiresRegister());
3247 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003248 if (mul->InputAt(1)->IsIntConstant()) {
3249 // Can use 3 operand multiply.
3250 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3251 } else {
3252 locations->SetOut(Location::SameAsFirstInput());
3253 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003254 break;
3255 }
3256 case Primitive::kPrimLong: {
3257 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003258 locations->SetInAt(1, Location::Any());
3259 if (mul->InputAt(1)->IsLongConstant() &&
3260 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003261 // Can use 3 operand multiply.
3262 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3263 } else {
3264 locations->SetOut(Location::SameAsFirstInput());
3265 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003266 break;
3267 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003268 case Primitive::kPrimFloat:
3269 case Primitive::kPrimDouble: {
3270 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003271 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003272 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003273 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003274 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003275
3276 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003277 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003278 }
3279}
3280
// Emits code for an HMul. Constant operands use the non-destructive
// three-operand imul when the constant fits in an int32; otherwise the
// destructive two-operand forms are used (output aliases the first input).
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        // Three-operand form: out = first * imm, first is preserved.
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // imulq only encodes 32-bit immediates (sign-extended).
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area.
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3364
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003365void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3366 uint32_t stack_adjustment, bool is_float) {
3367 if (source.IsStackSlot()) {
3368 DCHECK(is_float);
3369 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3370 } else if (source.IsDoubleStackSlot()) {
3371 DCHECK(!is_float);
3372 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3373 } else {
3374 // Write the value to the temporary location on the stack and load to FP stack.
3375 if (is_float) {
3376 Location stack_temp = Location::StackSlot(temp_offset);
3377 codegen_->Move(stack_temp, source);
3378 __ flds(Address(CpuRegister(RSP), temp_offset));
3379 } else {
3380 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3381 codegen_->Move(stack_temp, source);
3382 __ fldl(Address(CpuRegister(RSP), temp_offset));
3383 }
3384 }
3385}
3386
// Emits a floating-point remainder via the x87 FPREM instruction, since SSE
// has no remainder operation. Operands are moved through temporary stack
// slots onto the x87 stack, FPREM is iterated until reduction completes, and
// the result is transferred back into the output XMM register.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // (The divisor must be pushed first so the dividend ends up in ST(0).)
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  // FPREM performs only partial reduction per iteration for large exponent
  // differences, so it must be repeated until it reports completion.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3439
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003440void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3441 DCHECK(instruction->IsDiv() || instruction->IsRem());
3442
3443 LocationSummary* locations = instruction->GetLocations();
3444 Location second = locations->InAt(1);
3445 DCHECK(second.IsConstant());
3446
3447 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3448 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003449 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003450
3451 DCHECK(imm == 1 || imm == -1);
3452
3453 switch (instruction->GetResultType()) {
3454 case Primitive::kPrimInt: {
3455 if (instruction->IsRem()) {
3456 __ xorl(output_register, output_register);
3457 } else {
3458 __ movl(output_register, input_register);
3459 if (imm == -1) {
3460 __ negl(output_register);
3461 }
3462 }
3463 break;
3464 }
3465
3466 case Primitive::kPrimLong: {
3467 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003468 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003469 } else {
3470 __ movq(output_register, input_register);
3471 if (imm == -1) {
3472 __ negq(output_register);
3473 }
3474 }
3475 break;
3476 }
3477
3478 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003479 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003480 }
3481}
3482
// Emits division by a power-of-two constant as an arithmetic shift.
// Arithmetic right shift rounds toward negative infinity, while Java
// division truncates toward zero, so (abs_imm - 1) is conditionally added
// to negative numerators (via cmov) before shifting; a final negate handles
// negative divisors.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (abs_imm - 1), computed without touching flags.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Keep the unbiased numerator when it is non-negative.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // 64-bit bias may not fit in a lea displacement, so materialize it.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3526
// Generates code for an integer div or rem by an arbitrary constant (not 0, +/-1, and for div
// not a power of two) using the multiply-by-magic-constant technique: the magic value and the
// post-multiply shift come from CalculateMagicAndShiftForDivRem. The quotient ends up in
// RAX and the remainder in RDX, matching the register constraints asserted below.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // The temp that holds the saved numerator differs between div and rem because rem already
  // uses temp 0 for other purposes (see the locations set up in VisitDiv/VisitRem).
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  // The one-operand imul used below hard-wires RDX:RAX as its output pair.
  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator: the imul below clobbers EAX (and EDX).
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction terms for the signs of imm and magic (part of the magic-number algorithm).
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (add the sign bit to round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm; result must end up in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // 64-bit imul only takes a 32-bit immediate; fall back to a literal pool load otherwise.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3637
Calin Juravlebacfec32014-11-14 15:54:36 +00003638void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3639 DCHECK(instruction->IsDiv() || instruction->IsRem());
3640 Primitive::Type type = instruction->GetResultType();
3641 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3642
3643 bool is_div = instruction->IsDiv();
3644 LocationSummary* locations = instruction->GetLocations();
3645
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003646 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3647 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003648
Roland Levillain271ab9c2014-11-27 15:23:57 +00003649 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003650 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003651
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003652 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003653 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003654
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003655 if (imm == 0) {
3656 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3657 } else if (imm == 1 || imm == -1) {
3658 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003659 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003660 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003661 } else {
3662 DCHECK(imm <= -2 || imm >= 2);
3663 GenerateDivRemWithAnyConstant(instruction);
3664 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003665 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003666 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003667 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003668 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003669 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003670
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003671 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3672 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3673 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3674 // so it's safe to just use negl instead of more complex comparisons.
3675 if (type == Primitive::kPrimInt) {
3676 __ cmpl(second_reg, Immediate(-1));
3677 __ j(kEqual, slow_path->GetEntryLabel());
3678 // edx:eax <- sign-extended of eax
3679 __ cdq();
3680 // eax = quotient, edx = remainder
3681 __ idivl(second_reg);
3682 } else {
3683 __ cmpq(second_reg, Immediate(-1));
3684 __ j(kEqual, slow_path->GetEntryLabel());
3685 // rdx:rax <- sign-extended of rax
3686 __ cqo();
3687 // rax = quotient, rdx = remainder
3688 __ idivq(second_reg);
3689 }
3690 __ Bind(slow_path->GetExitLabel());
3691 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003692}
3693
// Sets up register constraints for HDiv. Integral division is pinned to RAX (dividend/quotient)
// with RDX as a temp, matching the x86-64 idiv instruction; FP division only needs an XMM input.
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax as the dividend.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
      // output and request another temp.
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      // The divisor may also come from memory or the constant area (see VisitDiv codegen).
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3726
// Emits code for HDiv: integral types go through the shared div/rem path; float/double use
// divss/divsd with the divisor taken from a register, the constant area, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant divisors are materialized in the method's literal area.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3775
// Sets up register constraints for HRem. Integral remainder uses idiv conventions (input in
// RAX, remainder produced in RDX); the FP path needs RAX as a temp (see GenerateRemFP).
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx
      locations->SetOut(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
      // output and request another temp.
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3810
3811void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3812 Primitive::Type type = rem->GetResultType();
3813 switch (type) {
3814 case Primitive::kPrimInt:
3815 case Primitive::kPrimLong: {
3816 GenerateDivRemIntegral(rem);
3817 break;
3818 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003819 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003820 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003821 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003822 break;
3823 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003824 default:
3825 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3826 }
3827}
3828
// The checked value can live anywhere (register, stack slot, or constant); the codegen below
// handles each form. The throwing slow path locations come from the shared helper.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3833
// Emits the divisor-is-zero check: compares the value against zero and jumps to a slow path
// (which raises the exception) when it is. Sub-int types are checked with 32-bit compares.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        // test reg,reg sets ZF when the register is zero without needing an immediate.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        // A constant divisor is either non-zero (no code needed) or an unconditional throw.
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3881
// Shared location setup for HShl/HShr/HUShr: the value shifts in place (out == first input)
// and a variable shift count must live in CL, per the x86 shift-instruction encoding.
void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}
3901
// Shared codegen for HShl/HShr/HUShr: picks shl/sar/shr (l or q width) depending on the
// operation and result type; constant shift counts are masked to the type's valid range.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // Variable count: the register is CL by construction (see HandleShift locations).
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);   // arithmetic right shift (sign-propagating)
        } else {
          __ shrl(first_reg, second_reg);   // logical right shift (zero-filling)
        }
      } else {
        // Mask the constant to the architectural 0-31 range for 32-bit shifts.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Mask the constant to the architectural 0-63 range for 64-bit shifts.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3959
// Location setup for HRor (rotate right): same constraints as shifts — the rotated value is
// updated in place and a variable rotate amount must be in CL.
void LocationsBuilderX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
3978
// Emits a rotate-right using ror (32-bit) or rorq (64-bit); constant rotate amounts are masked
// to the type's valid shift-distance range.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4008
// HShl delegates to the shared shift helpers for locations and codegen.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4016
// HShr (arithmetic right shift) delegates to the shared shift helpers.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4024
// HUShr (logical right shift) delegates to the shared shift helpers.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4032
// Location setup for HNewInstance: allocation calls into the runtime, so inputs go in the
// runtime calling-convention registers (or a temp for the string-alloc fast path) and the
// result comes back in RAX.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    // String allocation only needs a temp to hold the method pointer (see codegen).
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(Location::RegisterLocation(RAX));
}
4045
// Emits code for HNewInstance. Strings are allocated via the StringFactory's NewEmptyString
// entrypoint (loaded through the GS-relative thread pointer); everything else calls the
// instruction's designated runtime allocation entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    // Record the PC so the runtime can map this call site back to the dex instruction.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4062
// Location setup for HNewArray: arguments are pinned to the runtime calling-convention
// registers (slot 0 is reserved as a temp for the type index loaded in codegen) and the
// allocated array reference is returned in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
4072
// Emits code for HNewArray: loads the dex type index into the first runtime-call argument
// register, then calls the allocation entrypoint recorded on the instruction.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex().index_);
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
4084
// Location setup for HParameterValue: the calling-convention visitor assigns each parameter
// its incoming register or caller-frame slot; stack slots are rebased by the callee frame size
// because they are addressed relative to this method's SP.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
4096
// No code is emitted for HParameterValue: the value already sits in the location chosen by
// the locations builder above.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4101
// HCurrentMethod yields the ArtMethod* of the method being compiled, which lives in the
// dedicated method register per the managed calling convention.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4107
// No code is emitted for HCurrentMethod: the method pointer is already in its register.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4112
// HClassTableGet reads a method pointer out of a class's vtable or IMT; both the class input
// and the resulting ArtMethod* need plain registers.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
4119
// Emits code for HClassTableGet. vtable entries are embedded directly in the class object
// (single load at a fixed offset); IMT entries need two loads: first the ImTable pointer from
// the class, then the method slot within that table.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // Load the ImTable* from the class, then the method entry from the table.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
            mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4137
// HNot (bitwise complement) operates in place: the output reuses the input register.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4144
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004145void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4146 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004147 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4148 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004149 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004150 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004151 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004152 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004153 break;
4154
4155 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004156 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004157 break;
4158
4159 default:
4160 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4161 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004162}
4163
// HBooleanNot also operates in place on a single register.
void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4170
4171void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004172 LocationSummary* locations = bool_not->GetLocations();
4173 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4174 locations->Out().AsRegister<CpuRegister>().AsRegister());
4175 Location out = locations->Out();
4176 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4177}
4178
// HPhi places no constraints: the register allocator resolves phi inputs/outputs, so every
// operand and the result may live anywhere.
void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
4187
// Phis generate no code of their own: they are expected to have been
// resolved into moves before code generation, so reaching this visitor
// indicates a compiler bug.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4191
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004192void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004193 /*
4194 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004195 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004196 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4197 */
4198 switch (kind) {
4199 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004200 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004201 break;
4202 }
4203 case MemBarrierKind::kAnyStore:
4204 case MemBarrierKind::kLoadAny:
4205 case MemBarrierKind::kStoreStore: {
4206 // nop
4207 break;
4208 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004209 case MemBarrierKind::kNTStoreStore:
4210 // Non-Temporal Store/Store needs an explicit fence.
4211 MemoryFence(/* non-temporal */ true);
4212 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004213 }
4214}
4215
4216void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4217 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4218
Roland Levillain0d5a2812015-11-13 10:07:31 +00004219 bool object_field_get_with_read_barrier =
4220 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004221 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004222 new (GetGraph()->GetArena()) LocationSummary(instruction,
4223 object_field_get_with_read_barrier ?
4224 LocationSummary::kCallOnSlowPath :
4225 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004226 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004227 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004228 }
Calin Juravle52c48962014-12-16 17:02:57 +00004229 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004230 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4231 locations->SetOut(Location::RequiresFpuRegister());
4232 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004233 // The output overlaps for an object field get when read barriers
4234 // are enabled: we do not want the move to overwrite the object's
4235 // location, as we need it to emit the read barrier.
4236 locations->SetOut(
4237 Location::RequiresRegister(),
4238 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004239 }
Calin Juravle52c48962014-12-16 17:02:57 +00004240}
4241
// Emits the load for an instance or static field get. Input 0 holds the
// object (or class) containing the field; the loaded value goes to the
// output location. For volatile and reference fields, memory barriers and
// read barriers are interleaved with the load as required.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending 8-bit load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending 8-bit load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load (chars are unsigned).
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // Volatile load: ensure subsequent accesses cannot be reordered
      // before it.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4340
4341void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4342 const FieldInfo& field_info) {
4343 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4344
4345 LocationSummary* locations =
4346 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004347 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004348 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004349 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004350 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004351
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004352 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004353 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004354 if (is_volatile) {
4355 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4356 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4357 } else {
4358 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4359 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004360 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004361 if (is_volatile) {
4362 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4363 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4364 } else {
4365 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4366 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004367 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004368 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004369 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004370 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004371 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004372 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4373 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004374 locations->AddTemp(Location::RequiresRegister());
4375 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004376}
4377
// Emits the store for an instance or static field set. Input 0 holds the
// object (or class), input 1 the value (register or constant). Volatile
// stores are bracketed by AnyStore/AnyAny barriers; reference stores also
// get heap-reference poisoning (when enabled) and the card-marking write
// barrier. `value_can_be_null` is forwarded to MarkGCCard.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when MoveInt64ToAddress was responsible for implicit null check
  // recording (it receives `instruction` for that purpose), so the common
  // call below must be skipped.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temporary so the value register is
          // left intact for the write barrier below.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4500
// Instance field stores share HandleFieldSet with static field stores.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4504
// Forwards to the shared field-set emitter, passing along whether the
// stored value may be null (forwarded to the write barrier).
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4508
// Instance field loads share HandleFieldGet with static field loads.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4512
// Forwards to the shared field-get emitter.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004516
// Static field loads share HandleFieldGet with instance field loads.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004520
// Forwards to the shared field-get emitter.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004524
// Static field stores share HandleFieldSet with instance field stores.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004528
Calin Juravle52c48962014-12-16 17:02:57 +00004529void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004530 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004531}
4532
Calin Juravlee460d1d2015-09-29 04:52:17 +01004533void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4534 HUnresolvedInstanceFieldGet* instruction) {
4535 FieldAccessCallingConventionX86_64 calling_convention;
4536 codegen_->CreateUnresolvedFieldLocationSummary(
4537 instruction, instruction->GetFieldType(), calling_convention);
4538}
4539
4540void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4541 HUnresolvedInstanceFieldGet* instruction) {
4542 FieldAccessCallingConventionX86_64 calling_convention;
4543 codegen_->GenerateUnresolvedFieldAccess(instruction,
4544 instruction->GetFieldType(),
4545 instruction->GetFieldIndex(),
4546 instruction->GetDexPc(),
4547 calling_convention);
4548}
4549
4550void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4551 HUnresolvedInstanceFieldSet* instruction) {
4552 FieldAccessCallingConventionX86_64 calling_convention;
4553 codegen_->CreateUnresolvedFieldLocationSummary(
4554 instruction, instruction->GetFieldType(), calling_convention);
4555}
4556
4557void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4558 HUnresolvedInstanceFieldSet* instruction) {
4559 FieldAccessCallingConventionX86_64 calling_convention;
4560 codegen_->GenerateUnresolvedFieldAccess(instruction,
4561 instruction->GetFieldType(),
4562 instruction->GetFieldIndex(),
4563 instruction->GetDexPc(),
4564 calling_convention);
4565}
4566
4567void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4568 HUnresolvedStaticFieldGet* instruction) {
4569 FieldAccessCallingConventionX86_64 calling_convention;
4570 codegen_->CreateUnresolvedFieldLocationSummary(
4571 instruction, instruction->GetFieldType(), calling_convention);
4572}
4573
4574void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4575 HUnresolvedStaticFieldGet* instruction) {
4576 FieldAccessCallingConventionX86_64 calling_convention;
4577 codegen_->GenerateUnresolvedFieldAccess(instruction,
4578 instruction->GetFieldType(),
4579 instruction->GetFieldIndex(),
4580 instruction->GetDexPc(),
4581 calling_convention);
4582}
4583
4584void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4585 HUnresolvedStaticFieldSet* instruction) {
4586 FieldAccessCallingConventionX86_64 calling_convention;
4587 codegen_->CreateUnresolvedFieldLocationSummary(
4588 instruction, instruction->GetFieldType(), calling_convention);
4589}
4590
4591void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4592 HUnresolvedStaticFieldSet* instruction) {
4593 FieldAccessCallingConventionX86_64 calling_convention;
4594 codegen_->GenerateUnresolvedFieldAccess(instruction,
4595 instruction->GetFieldType(),
4596 instruction->GetFieldIndex(),
4597 instruction->GetDexPc(),
4598 calling_convention);
4599}
4600
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004601void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004602 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4603 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4604 ? Location::RequiresRegister()
4605 : Location::Any();
4606 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004607}
4608
Calin Juravle2ae48182016-03-16 14:05:09 +00004609void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4610 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004611 return;
4612 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004613 LocationSummary* locations = instruction->GetLocations();
4614 Location obj = locations->InAt(0);
4615
4616 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004617 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004618}
4619
Calin Juravle2ae48182016-03-16 14:05:09 +00004620void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004621 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004622 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004623
4624 LocationSummary* locations = instruction->GetLocations();
4625 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004626
4627 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004628 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004629 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004630 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004631 } else {
4632 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004633 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004634 __ jmp(slow_path->GetEntryLabel());
4635 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004636 }
4637 __ j(kEqual, slow_path->GetEntryLabel());
4638}
4639
// Null check emission is centralized in the code generator, which chooses
// between the implicit and explicit strategies (see
// GenerateImplicitNullCheck / GenerateExplicitNullCheck above).
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
4643
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004644void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004645 bool object_array_get_with_read_barrier =
4646 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004647 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004648 new (GetGraph()->GetArena()) LocationSummary(instruction,
4649 object_array_get_with_read_barrier ?
4650 LocationSummary::kCallOnSlowPath :
4651 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004652 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004653 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004654 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004655 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004656 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004657 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4658 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4659 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004660 // The output overlaps for an object array get when read barriers
4661 // are enabled: we do not want the move to overwrite the array's
4662 // location, as we need it to emit the read barrier.
4663 locations->SetOut(
4664 Location::RequiresRegister(),
4665 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004666 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004667}
4668
// Emits the load for an array element read. Input 0 is the array, input 1
// the index (register or constant); the element goes to the output
// location. String char-at loads additionally handle the compressed-string
// layout, and reference loads handle read barriers.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending 8-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending 8-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case Primitive::kPrimChar: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Branch cases into compressed and uncompressed for each index's type.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        // The low bit of the count field is the compression flag.
        __ testl(Address(obj, count_offset), Immediate(1));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        // Compressed strings store one byte per character.
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        // Zero-extending 16-bit load (chars are unsigned).
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
4785
4786void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004787 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004788
4789 bool needs_write_barrier =
4790 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004791 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004792
Nicolas Geoffray39468442014-09-02 15:17:15 +01004793 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004794 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004795 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004796 LocationSummary::kCallOnSlowPath :
4797 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004798
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004799 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004800 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4801 if (Primitive::IsFloatingPointType(value_type)) {
4802 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004803 } else {
4804 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4805 }
4806
4807 if (needs_write_barrier) {
4808 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004809 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004810 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004811 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004812}
4813
// Emits the code for an array-element store.  Primitive stores are a single
// mov of the appropriate width; reference stores additionally perform an
// optional runtime type check (via a slow path), heap-reference poisoning
// when enabled, and GC card marking for the write barrier.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Field offsets used by the reference-store type check below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    // 8-bit stores.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    // 16-bit stores.
    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    // Reference stores: may need a type check, poisoning and a write barrier.
    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check and no write barrier needed.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null never fails the type check; store it directly
          // and skip over the check and the card marking.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For an Object[] array, an exact class match suffices;
          // otherwise fall back to a super-class probe before taking
          // the slow path.
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Perform the actual reference store, poisoning the value first
      // (through `temp`, so `register_value` keeps the unpoisoned ref).
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      // With a slow path, the implicit null check was already recorded on
      // the first load from `array` above.
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // GC write barrier: mark the card covering `array`.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    // 32-bit stores.
    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    // 64-bit stores; constants may need to be split into two 32-bit moves
    // (handled by MoveInt64ToAddress, which also records the null check).
    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the raw bit pattern of the float constant.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5016
5017void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005018 LocationSummary* locations =
5019 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005020 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005021 if (!instruction->IsEmittedAtUseSite()) {
5022 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5023 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005024}
5025
5026void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005027 if (instruction->IsEmittedAtUseSite()) {
5028 return;
5029 }
5030
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005031 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005032 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005033 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5034 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005035 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005036 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005037 // Mask out most significant bit in case the array is String's array of char.
5038 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005039 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005040 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005041}
5042
5043void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005044 RegisterSet caller_saves = RegisterSet::Empty();
5045 InvokeRuntimeCallingConvention calling_convention;
5046 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5047 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5048 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005049 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005050 HInstruction* length = instruction->InputAt(1);
5051 if (!length->IsEmittedAtUseSite()) {
5052 locations->SetInAt(1, Location::RegisterOrConstant(length));
5053 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005054}
5055
// Emits an array bounds check: jumps to a throwing slow path when the index
// is not in [0, length).  Handles constant index and/or length specially,
// and can compare against the in-memory length field when the ArrayLength
// was folded into this check.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically out of bounds: unconditionally take the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned compare also catches negative indices.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Shift out the compression flag bit to get the char count.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // length <= index (unsigned) means out of bounds.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5116
// GC write barrier: dirties the card-table entry covering `object` after a
// reference `value` has been stored into it.  `temp` and `card` are scratch
// registers.  When `value_can_be_null` is true, a null store skips the
// marking entirely (no barrier is needed for null).
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the thread-local card table base via a GS-relative access.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // temp = object >> kCardShift: byte index of the card covering `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Dirty the card by storing the low byte of the table base at
  // base + index (the base's low byte serves as the non-zero dirty value).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5136
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves carry their operands directly and are emitted by the
  // parallel move resolver; they never go through the locations builder,
  // so reaching this visitor indicates a compiler bug.
  LOG(FATAL) << "Unimplemented";
}
5140
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  // All moves of an HParallelMove are emitted at once by the move resolver.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5144
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005145void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005146 LocationSummary* locations =
5147 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005148 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005149}
5150
5151void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005152 HBasicBlock* block = instruction->GetBlock();
5153 if (block->GetLoopInformation() != nullptr) {
5154 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5155 // The back edge will generate the suspend check.
5156 return;
5157 }
5158 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5159 // The goto will generate the suspend check.
5160 return;
5161 }
5162 GenerateSuspendCheck(instruction, nullptr);
5163}
5164
// Emits the actual suspend check: tests the thread's flags word and calls
// the runtime via a slow path when any flag is set.  With a non-null
// `successor` (a loop header reached from a back edge), the fast path jumps
// to the successor and the slow path falls through; otherwise the slow path
// returns inline.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // The slow path is cached on the instruction so that multiple back edges
  // of the same loop share a single slow path.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the 16-bit thread-flags field (GS-relative, thread-local).
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Inline return: take the slow path only when a flag is set.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge form: no flags -> jump to the loop header; otherwise fall
    // into the slow path, which resumes at the successor.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5192
// The move resolver emits code through its owning code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5196
// Emits one resolved move of a parallel move, dispatching on the kinds of
// the source and destination locations (GPR, FPU register, 32-/64-bit stack
// slot, or constant).  Memory-to-memory moves go through the scratch
// register TMP; constants are materialized directly.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack: bounce through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      // 64-bit stack-to-stack: bounce through TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor-zeroing instead of loading the immediate 0.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    }
  }
}
5294
// Swaps a 32-bit value between a core register and a stack slot at RSP-relative
// offset `mem`, using the reserved TMP register as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = [RSP + mem]
  __ movl(Address(CpuRegister(RSP), mem), reg);               // [RSP + mem] = reg
  __ movl(reg, CpuRegister(TMP));                             // reg = old [RSP + mem]
}
5300
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005301void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005302 ScratchRegisterScope ensure_scratch(
5303 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5304
5305 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5306 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5307 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5308 Address(CpuRegister(RSP), mem2 + stack_offset));
5309 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5310 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5311 CpuRegister(ensure_scratch.GetRegister()));
5312}
5313
// Swaps two 64-bit core registers through TMP (three-mov swap).
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5319
// Swaps a 64-bit value between a core register and a stack slot at RSP-relative
// offset `mem`, using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = [RSP + mem]
  __ movq(Address(CpuRegister(RSP), mem), reg);               // [RSP + mem] = reg
  __ movq(reg, CpuRegister(TMP));                             // reg = old [RSP + mem]
}
5325
// Swaps two 64-bit stack slots. Mirrors Exchange32(int, int): a second scratch
// register is acquired, and if it had to be spilled (pushed), both RSP-relative
// offsets are shifted by one word.
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Account for the push performed when the scratch register was spilled.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5338
// Swaps a 32-bit value between an XMM register and a stack slot. The memory
// word travels through the core TMP register and is moved into the XMM
// register with movd.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = [RSP + mem]
  __ movss(Address(CpuRegister(RSP), mem), reg);              // [RSP + mem] = low 32 bits of reg
  __ movd(reg, CpuRegister(TMP));                             // reg = old [RSP + mem]
}
5344
// Swaps a 64-bit value between an XMM register and a stack slot, going through
// the core TMP register.
// NOTE(review): the final transfer uses movd with a CpuRegister source;
// presumably the assembler emits the 64-bit (REX.W) form for this overload —
// confirm against X86_64Assembler::movd.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = [RSP + mem]
  __ movsd(Address(CpuRegister(RSP), mem), reg);              // [RSP + mem] = low 64 bits of reg
  __ movd(reg, CpuRegister(TMP));                             // reg = old [RSP + mem]
}
5350
// Emits code swapping the source and destination of the parallel move at
// `index`. Dispatches on the (source, destination) location kinds to the
// matching 32/64-bit Exchange helper; the cases are mutually exclusive.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM: park one value in the core TMP register, copy the other
    // across with movaps, then restore from TMP.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5386
5387
// Spills a scratch core register by pushing it on the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5391
5392
// Restores a spilled scratch core register by popping it off the stack.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5396
// Emits a check that the class in `class_reg` is initialized: a status value
// below kStatusInitialized branches to `slow_path`. Also binds the slow path's
// exit label so execution resumes here afterwards.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5405
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005406HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5407 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005408 switch (desired_class_load_kind) {
5409 case HLoadClass::LoadKind::kReferrersClass:
5410 break;
5411 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5412 DCHECK(!GetCompilerOptions().GetCompilePic());
5413 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5414 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5415 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5416 DCHECK(GetCompilerOptions().GetCompilePic());
5417 break;
5418 case HLoadClass::LoadKind::kBootImageAddress:
5419 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005420 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005421 break;
5422 case HLoadClass::LoadKind::kDexCachePcRelative:
5423 DCHECK(!Runtime::Current()->UseJitCompilation());
5424 break;
5425 case HLoadClass::LoadKind::kDexCacheViaMethod:
5426 break;
5427 }
5428 return desired_class_load_kind;
5429}
5430
// Allocates register locations for an HLoadClass. Access-checked loads go
// through the runtime (fixed calling-convention registers); otherwise the
// call kind depends on whether a slow path (environment or read barrier)
// may be needed.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    // Runtime call: type index in the first runtime argument, result in RAX.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Location::RegisterLocation(RAX),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  // These two kinds read through the current method, so it must be in a register.
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}
5458
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005459Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
5460 dex::TypeIndex dex_index,
5461 uint64_t address) {
5462 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index), address);
5463 // Add a patch entry and return the label.
5464 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
5465 PatchInfo<Label>* info = &jit_class_patches_.back();
5466 return &info->label;
5467}
5468
// Generates code loading a class reference into the output register,
// dispatching on the load kind chosen by GetSupportedLoadClassKind. Some
// kinds may additionally require a null check and/or clinit check through a
// shared slow path.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Resolve and verify access entirely in the runtime.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are not moved by the GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // RIP-relative lea with a dummy offset; the linker patches it later.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // Load the class root from the JIT roots table; patched at commit time.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetAddress());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // The dex cache entry may still be null if the class is unresolved.
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
          /* fixup_label */ nullptr,
          read_barrier_option);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  // One slow path serves both the unresolved-class and the clinit cases.
  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // Binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5569
5570void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5571 LocationSummary* locations =
5572 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5573 locations->SetInAt(0, Location::RequiresRegister());
5574 if (check->HasUses()) {
5575 locations->SetOut(Location::SameAsFirstInput());
5576 }
5577}
5578
// Generates a class-initialization check: creates the slow path that runs the
// class initializer, then emits the fast-path status comparison.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
5587
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005588HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5589 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005590 switch (desired_string_load_kind) {
5591 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5592 DCHECK(!GetCompilerOptions().GetCompilePic());
5593 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5594 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5595 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5596 DCHECK(GetCompilerOptions().GetCompilePic());
5597 break;
5598 case HLoadString::LoadKind::kBootImageAddress:
5599 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005600 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005601 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005602 break;
5603 case HLoadString::LoadKind::kDexCacheViaMethod:
5604 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005605 case HLoadString::LoadKind::kJitTableAddress:
5606 DCHECK(Runtime::Current()->UseJitCompilation());
5607 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005608 }
5609 return desired_string_load_kind;
5610}
5611
// Allocates register locations for an HLoadString based on its load kind.
// The runtime-call kind uses RAX by custom convention; BSS-entry loads tune
// the slow path's caller-save set when Baker read barriers are in use.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and/or marking to save everything.
        // Custom calling convention: RAX serves as both input and output.
        RegisterSet caller_saves = RegisterSet::Empty();
        caller_saves.Add(Location::RegisterLocation(RAX));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
5632
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005633Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005634 dex::StringIndex dex_index,
5635 Handle<mirror::String> handle) {
5636 jit_string_roots_.Overwrite(
5637 StringReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005638 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005639 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005640 PatchInfo<Label>* info = &jit_string_patches_.back();
5641 return &info->label;
5642}
5643
// Generates code loading a string reference into the output register,
// dispatching on the load kind; kinds not handled in the switch fall through
// to a pResolveString runtime call.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // RIP-relative lea with a dummy offset; the linker patches it later.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // A null BSS entry means the string is not resolved yet: go resolve it.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // Load the string root from the JIT roots table; patched at commit time.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5699
David Brazdilcb1c0552015-08-04 16:22:25 +01005700static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005701 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005702 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005703}
5704
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005705void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5706 LocationSummary* locations =
5707 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5708 locations->SetOut(Location::RequiresRegister());
5709}
5710
5711void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005712 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5713}
5714
// Clearing the exception needs no registers and makes no call.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5718
// Stores null into the thread-local exception field (via the GS segment).
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5722
5723void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5724 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005725 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005726 InvokeRuntimeCallingConvention calling_convention;
5727 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5728}
5729
// Delivers the exception through the pDeliverException runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5734
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005735static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5736 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005737 // We need a temporary for holding the iftable length.
5738 return true;
5739 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005740 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005741 !kUseBakerReadBarrier &&
5742 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005743 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5744 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5745}
5746
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005747static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5748 return kEmitCompilerReadBarrier &&
5749 !kUseBakerReadBarrier &&
5750 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5751 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5752 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5753}
5754
// Allocates register locations for an HInstanceOf. The call kind depends on
// the type-check kind and the read-barrier configuration; some combinations
// also need an extra temporary (see InstanceOfTypeCheckNeedsATemporary).
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      // These checks only call out when a read barrier is needed.
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  // When read barriers are enabled, we need a temporary register for
  // some cases.
  if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
5789
// Generates code for HInstanceOf. Materializes a boolean in `out`: 1 if `obj`
// is an instance of the class held in `cls`, 0 otherwise. The inlined sequence
// depends on the statically computed TypeCheckKind; the array-check, unresolved
// and interface cases always branch to a TypeCheckSlowPathX86_64 that calls
// into the runtime.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  // A temp register is only reserved for some check kinds (see
  // InstanceOfTypeCheckNeedsATemporary and the LocationsBuilder code).
  Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  // Offsets of the object/class fields traversed by the inlined checks below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // A single comparison of the object's class against `cls` decides.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        // No null check was emitted, so the flag can be materialized directly.
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      // NOTE: the `success` label is declared but unused in this case.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      // A primitive component type means `obj` is not assignable to `cls`.
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }
  }

  // Shared epilogue: materialize the `false` result if any path needs it.
  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6016
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006017static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006018 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006019 case TypeCheckKind::kExactCheck:
6020 case TypeCheckKind::kAbstractClassCheck:
6021 case TypeCheckKind::kClassHierarchyCheck:
6022 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006023 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006024 case TypeCheckKind::kInterfaceCheck:
6025 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006026 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006027 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006028 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006029 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006030 LOG(FATAL) << "Unreachable";
6031 UNREACHABLE();
6032}
6033
6034void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6035 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6036 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6037 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6038 LocationSummary::CallKind call_kind = is_fatal_slow_path
6039 ? LocationSummary::kNoCall
6040 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006041 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6042 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006043 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6044 // Require a register for the interface check since there is a loop that compares the class to
6045 // a memory address.
6046 locations->SetInAt(1, Location::RequiresRegister());
6047 } else {
6048 locations->SetInAt(1, Location::Any());
6049 }
6050
Roland Levillain0d5a2812015-11-13 10:07:31 +00006051 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6052 locations->AddTemp(Location::RequiresRegister());
6053 // When read barriers are enabled, we need an additional temporary
6054 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006055 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006056 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006057 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006058}
6059
// Generates code for HCheckCast: verifies that `obj` (when non-null) can be
// cast to the class held in `cls`, branching on failure to a
// TypeCheckSlowPathX86_64 that either throws or redoes a precise check in the
// runtime. The inlined fast paths deliberately load references without read
// barriers: a stale reference can only yield a false negative, which merely
// sends us to the slow path (see the comment on the fatal-ness computation).
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // A second temp is only reserved for some check kinds (see
  // CheckCastTypeCheckNeedsATemporary and the LocationsBuilder code).
  Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  // Offsets of the object/class/array fields traversed by the checks below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal =
      IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);


  NearLabel done;
  // A null reference passes any checkcast.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      // Otherwise, jump to the slow path to throw the exception.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      // A primitive component type means the cast must fail.
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck:
      // Fast path for the interface check. We always go slow path for heap poisoning since
      // unpoisoning cls would require an extra temp.
      if (!kPoisonHeapReferences) {
        // Try to avoid read barriers to improve the fast path. We can not get false positives by
        // doing this.
        // /* HeapReference<Class> */ temp = obj->klass_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          obj_loc,
                                          class_offset,
                                          kWithoutReadBarrier);

        // /* HeapReference<Class> */ temp = temp->iftable_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          temp_loc,
                                          iftable_offset,
                                          kWithoutReadBarrier);
        // Iftable is never null.
        __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
        // Loop through the iftable and check if any class matches.
        NearLabel start_loop;
        __ Bind(&start_loop);
        // Need to subtract first to handle the empty array case.
        __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
        __ j(kNegative, type_check_slow_path->GetEntryLabel());
        // Go to next interface if the classes do not match.
        __ cmpl(cls.AsRegister<CpuRegister>(),
                CodeGeneratorX86_64::ArrayAddress(temp,
                                                  maybe_temp2_loc,
                                                  TIMES_4,
                                                  object_array_data_offset));
        __ j(kNotEqual, &start_loop);  // Fall through (success) on a match.
      } else {
        __ jmp(type_check_slow_path->GetEntryLabel());
      }
      break;
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
6283
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006284void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6285 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006286 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006287 InvokeRuntimeCallingConvention calling_convention;
6288 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6289}
6290
6291void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006292 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006293 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006294 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006295 if (instruction->IsEnter()) {
6296 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6297 } else {
6298 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6299 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006300}
6301
// And shares the same operand constraints as Or/Xor; delegate to the common helper.
void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
// Or shares the same operand constraints as And/Xor; delegate to the common helper.
void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
// Xor shares the same operand constraints as And/Or; delegate to the common helper.
void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6305
6306void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6307 LocationSummary* locations =
6308 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6309 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6310 || instruction->GetResultType() == Primitive::kPrimLong);
6311 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006312 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006313 locations->SetOut(Location::SameAsFirstInput());
6314}
6315
// Code generation for And is shared with Or/Xor; delegate to the common helper.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}
6319
// Code generation for Or is shared with And/Xor; delegate to the common helper.
void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
6323
// Code generation for Xor is shared with And/Or; delegate to the common helper.
void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6327
// Generates code for a two-operand bitwise HAnd/HOr/HXor. The register
// allocator has placed the output in the same location as the first input
// (two-address x86 form), so each instruction emitted below both reads and
// writes `first`.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-address form: the destination aliases the first source.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // 32-bit case: the second operand may be a register, a constant
    // (always encodable as a 32-bit immediate) or a stack slot.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Second operand lives on the stack; use a memory operand off RSP.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // x86-64 logic instructions only accept sign-extended 32-bit immediates;
    // wider constants are routed through the RIP-relative constant area.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6416
// Loads the heap reference at `*(out + offset)` into `out`, overwriting the
// base object reference currently held there, and emits the read barrier
// dictated by `read_barrier_option`:
//  - Baker: fast-path field load with an out-of-line mark slow path;
//  - other read barriers: the old value of `out` is first saved in
//    `maybe_temp` so the slow path can recompute the reference;
//  - kWithoutReadBarrier: plain load (plus unpoisoning when enabled).
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6449
// Loads the heap reference at `*(obj + offset)` into the distinct register
// `out` (unlike the one-register variant, the base object in `obj` is left
// intact), emitting the read barrier requested by `read_barrier_option`.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6478
// Loads a GC root (a compressed reference) from `address` into `root_reg`,
// with the read barrier variant selected by `read_barrier_option`. If
// `fixup_label` is non-null it is bound immediately after the load/lea so
// that PC-relative patching (e.g. for dex cache / boot image roots) targets
// the right instruction.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The movl above loads a 32-bit compressed reference directly into the
      // root register; the asserts below guarantee that this is size-correct.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the thread-local is-GC-marking flag (gs-relative access) and
      // take the mark slow path only while the GC is marking.
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6542
6543void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6544 Location ref,
6545 CpuRegister obj,
6546 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006547 bool needs_null_check) {
6548 DCHECK(kEmitCompilerReadBarrier);
6549 DCHECK(kUseBakerReadBarrier);
6550
6551 // /* HeapReference<Object> */ ref = *(obj + offset)
6552 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006553 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006554}
6555
6556void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6557 Location ref,
6558 CpuRegister obj,
6559 uint32_t data_offset,
6560 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006561 bool needs_null_check) {
6562 DCHECK(kEmitCompilerReadBarrier);
6563 DCHECK(kUseBakerReadBarrier);
6564
Roland Levillain3d312422016-06-23 13:53:42 +01006565 static_assert(
6566 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6567 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006568 // /* HeapReference<Object> */ ref =
6569 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006570 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006571 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006572}
6573
// Core Baker read barrier sequence: loads the heap reference at `src` into
// `ref` and, when the source object is gray, routes through a mark slow
// path. The emission order (monitor-word test, then load, then branch) is
// load-load ordering required by the barrier; the EFLAGS produced by the
// testb must survive untouched until the final conditional jump. When
// `always_update_field` is set, the slow path also writes the marked
// reference back into the field (`temp1`/`temp2` must then be provided).
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above dereferences `obj`, so it doubles as the implicit
    // null check when one was requested.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6657
6658void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6659 Location out,
6660 Location ref,
6661 Location obj,
6662 uint32_t offset,
6663 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006664 DCHECK(kEmitCompilerReadBarrier);
6665
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006666 // Insert a slow path based read barrier *after* the reference load.
6667 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006668 // If heap poisoning is enabled, the unpoisoning of the loaded
6669 // reference will be carried out by the runtime within the slow
6670 // path.
6671 //
6672 // Note that `ref` currently does not get unpoisoned (when heap
6673 // poisoning is enabled), which is alright as the `ref` argument is
6674 // not used by the artReadBarrierSlow entry point.
6675 //
6676 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6677 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6678 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6679 AddSlowPath(slow_path);
6680
Roland Levillain0d5a2812015-11-13 10:07:31 +00006681 __ jmp(slow_path->GetEntryLabel());
6682 __ Bind(slow_path->GetExitLabel());
6683}
6684
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006685void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6686 Location out,
6687 Location ref,
6688 Location obj,
6689 uint32_t offset,
6690 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006691 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006692 // Baker's read barriers shall be handled by the fast path
6693 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6694 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006695 // If heap poisoning is enabled, unpoisoning will be taken care of
6696 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006697 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006698 } else if (kPoisonHeapReferences) {
6699 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6700 }
6701}
6702
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006703void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6704 Location out,
6705 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006706 DCHECK(kEmitCompilerReadBarrier);
6707
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006708 // Insert a slow path based read barrier *after* the GC root load.
6709 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006710 // Note that GC roots are not affected by heap poisoning, so we do
6711 // not need to do anything special for this here.
6712 SlowPathCode* slow_path =
6713 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6714 AddSlowPath(slow_path);
6715
Roland Levillain0d5a2812015-11-13 10:07:31 +00006716 __ jmp(slow_path->GetEntryLabel());
6717 __ Bind(slow_path->GetExitLabel());
6718}
6719
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do: HBoundType instructions are expected to have been removed
  // by the "prepare for register allocation" pass, so reaching this visitor
  // indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
6724
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do: HBoundType instructions are expected to have been removed
  // by the "prepare for register allocation" pass, so reaching this visitor
  // indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
6729
Mark Mendellfe57faa2015-09-18 09:26:15 -04006730// Simple implementation of packed switch - generate cascaded compare/jumps.
6731void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6732 LocationSummary* locations =
6733 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6734 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006735 locations->AddTemp(Location::RequiresRegister());
6736 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006737}
6738
// Emits code for HPackedSwitch. Two strategies, picked by entry count:
//  - a cascade of compare/branch pairs (two cases resolved per cmpl) for
//    switches at or below kPackedSwitchJumpTableThreshold entries;
//  - otherwise an indirect jump through a PC-relative jump table stored in
//    the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero bias: values below `lower_bound` fall to the default block,
      // so the "below" test must be signed (kLess).
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero lower bound an
      // unsigned "below" comparison works for the whole range.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps: each cmpl settles two cases.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table strategy from here on.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6819
Aart Bikc5d47542016-01-27 17:00:35 -08006820void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6821 if (value == 0) {
6822 __ xorl(dest, dest);
6823 } else {
6824 __ movl(dest, Immediate(value));
6825 }
6826}
6827
Mark Mendell92e83bf2015-05-07 11:25:03 -04006828void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6829 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006830 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006831 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006832 } else if (IsUint<32>(value)) {
6833 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006834 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6835 } else {
6836 __ movq(dest, Immediate(value));
6837 }
6838}
6839
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006840void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6841 if (value == 0) {
6842 __ xorps(dest, dest);
6843 } else {
6844 __ movss(dest, LiteralInt32Address(value));
6845 }
6846}
6847
6848void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6849 if (value == 0) {
6850 __ xorpd(dest, dest);
6851 } else {
6852 __ movsd(dest, LiteralInt64Address(value));
6853 }
6854}
6855
6856void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6857 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6858}
6859
6860void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6861 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6862}
6863
Aart Bika19616e2016-02-01 18:57:58 -08006864void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6865 if (value == 0) {
6866 __ testl(dest, dest);
6867 } else {
6868 __ cmpl(dest, Immediate(value));
6869 }
6870}
6871
6872void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6873 if (IsInt<32>(value)) {
6874 if (value == 0) {
6875 __ testq(dest, dest);
6876 } else {
6877 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6878 }
6879 } else {
6880 // Value won't fit in an int.
6881 __ cmpq(dest, LiteralInt64Address(value));
6882 }
6883}
6884
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006885void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6886 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006887 GenerateIntCompare(lhs_reg, rhs);
6888}
6889
6890void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006891 if (rhs.IsConstant()) {
6892 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006893 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006894 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006895 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006896 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006897 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006898 }
6899}
6900
6901void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6902 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6903 if (rhs.IsConstant()) {
6904 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6905 Compare64BitValue(lhs_reg, value);
6906 } else if (rhs.IsDoubleStackSlot()) {
6907 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6908 } else {
6909 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6910 }
6911}
6912
6913Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6914 Location index,
6915 ScaleFactor scale,
6916 uint32_t data_offset) {
6917 return index.IsConstant() ?
6918 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6919 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6920}
6921
Mark Mendellcfa410b2015-05-25 16:02:44 -04006922void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6923 DCHECK(dest.IsDoubleStackSlot());
6924 if (IsInt<32>(value)) {
6925 // Can move directly as an int32 constant.
6926 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6927 Immediate(static_cast<int32_t>(value)));
6928 } else {
6929 Load64BitValue(CpuRegister(TMP), value);
6930 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6931 }
6932}
6933
Mark Mendell9c86b482015-09-18 13:36:07 -04006934/**
6935 * Class to handle late fixup of offsets into constant area.
6936 */
6937class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6938 public:
6939 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6940 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6941
6942 protected:
6943 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6944
6945 CodeGeneratorX86_64* codegen_;
6946
6947 private:
6948 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6949 // Patch the correct offset for the instruction. We use the address of the
6950 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6951 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6952 int32_t relative_position = constant_offset - pos;
6953
6954 // Patch in the right value.
6955 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6956 }
6957
6958 // Location in constant area that the fixup refers to.
6959 size_t offset_into_constant_area_;
6960};
6961
/**
 * Class to handle late fixup of offsets to a jump table that will be created
 * in the constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      // The real offset is not known yet; it is set in CreateJumpTable().
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table for `switch_instr_` to the constant area and
  // retargets this fixup at it. Must run after all case labels are bound.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  // The switch instruction whose successors make up the table entries.
  const HPackedSwitch* switch_instr_;
};
6998
Mark Mendellf55c3e02015-03-26 21:07:46 -04006999void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7000 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007001 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007002 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7003 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007004 assembler->Align(4, 0);
7005 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007006
7007 // Populate any jump tables.
7008 for (auto jump_table : fixups_to_jump_tables_) {
7009 jump_table->CreateJumpTable();
7010 }
7011
7012 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007013 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007014 }
7015
7016 // And finish up.
7017 CodeGenerator::Finalize(allocator);
7018}
7019
Mark Mendellf55c3e02015-03-26 21:07:46 -04007020Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7021 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7022 return Address::RIP(fixup);
7023}
7024
7025Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7026 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7027 return Address::RIP(fixup);
7028}
7029
7030Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7031 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7032 return Address::RIP(fixup);
7033}
7034
7035Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7036 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7037 return Address::RIP(fixup);
7038}
7039
Andreas Gampe85b62f22015-09-09 13:15:38 -07007040// TODO: trg as memory.
7041void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7042 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007043 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007044 return;
7045 }
7046
7047 DCHECK_NE(type, Primitive::kPrimVoid);
7048
7049 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7050 if (trg.Equals(return_loc)) {
7051 return;
7052 }
7053
7054 // Let the parallel move resolver take care of all of this.
7055 HParallelMove parallel_move(GetGraph()->GetArena());
7056 parallel_move.AddMove(return_loc, trg, type, nullptr);
7057 GetMoveResolver()->EmitNativeCode(&parallel_move);
7058}
7059
Mark Mendell9c86b482015-09-18 13:36:07 -04007060Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7061 // Create a fixup to be used to create and address the jump table.
7062 JumpTableRIPFixup* table_fixup =
7063 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7064
7065 // We have to populate the jump tables.
7066 fixups_to_jump_tables_.push_back(table_fixup);
7067 return Address::RIP(table_fixup);
7068}
7069
Mark Mendellea5af682015-10-22 17:35:49 -04007070void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7071 const Address& addr_high,
7072 int64_t v,
7073 HInstruction* instruction) {
7074 if (IsInt<32>(v)) {
7075 int32_t v_32 = v;
7076 __ movq(addr_low, Immediate(v_32));
7077 MaybeRecordImplicitNullCheck(instruction);
7078 } else {
7079 // Didn't fit in a register. Do it in pieces.
7080 int32_t low_v = Low32Bits(v);
7081 int32_t high_v = High32Bits(v);
7082 __ movl(addr_low, Immediate(low_v));
7083 MaybeRecordImplicitNullCheck(instruction);
7084 __ movl(addr_high, Immediate(high_v));
7085 }
7086}
7087
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007088void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
7089 const uint8_t* roots_data,
7090 const PatchInfo<Label>& info,
7091 uint64_t index_in_table) const {
7092 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7093 uintptr_t address =
7094 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
7095 typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
7096 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7097 dchecked_integral_cast<uint32_t>(address);
7098}
7099
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007100void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7101 for (const PatchInfo<Label>& info : jit_string_patches_) {
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007102 const auto& it = jit_string_roots_.find(
7103 StringReference(&info.dex_file, dex::StringIndex(info.index)));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007104 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007105 PatchJitRootUse(code, roots_data, info, it->second);
7106 }
7107
7108 for (const PatchInfo<Label>& info : jit_class_patches_) {
7109 const auto& it = jit_class_roots_.find(
7110 TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
7111 DCHECK(it != jit_class_roots_.end());
7112 PatchJitRootUse(code, roots_data, info, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007113 }
7114}
7115
Roland Levillain4d027112015-07-01 15:41:14 +01007116#undef __
7117
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007118} // namespace x86_64
7119} // namespace art