blob: 22f7f6b52bb7e924a0e41a9a8d08cd9c683b1119 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Stack offset at which the current ArtMethod* is stored on entry.
static constexpr int kCurrentMethodStackOffset = 0;
// Register holding the ArtMethod* on entry (first argument register RDI).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Core and floating-point registers preserved across calls by this code generator.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition bit (bit 10) of the x87 FPU status word.
static constexpr int kC2ConditionMask = 0x400;
53
Roland Levillain7cbd27f2016-08-11 23:53:33 +010054// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
55#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070056#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
// Slow path throwing a NullPointerException via the quick runtime entrypoint.
// This path never returns to the fast path (IsFatal() is true).
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Call the runtime to throw; the entrypoint takes no arguments.
    x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  // The thrown exception terminates this path; no fall-through to the exit label.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};
83
// Slow path throwing an ArithmeticException (division by zero) via the quick
// runtime entrypoint. This path never returns to the fast path.
class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    // The entrypoint takes no arguments; it unconditionally throws.
    x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
};
102
// Slow path for integer div/rem when the divisor is -1. On x86-64, idiv of
// MIN_VALUE by -1 raises #DE, so this case is handled out of line:
//   div: MIN_VALUE / -1 == MIN_VALUE, i.e. negate the dividend register;
//   rem: x % -1 == 0, i.e. zero the result register.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `reg` holds the dividend on entry and receives the result.
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xor is sufficient: writing the 32-bit register zero-extends
        // into the full 64-bit register on x86-64.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  // Register holding both the input dividend and the output result.
  const CpuRegister cpu_reg_;
  // kPrimInt or kPrimLong (checked by the DCHECK above).
  const Primitive::Type type_;
  // True for division, false for remainder.
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
136
// Slow path calling the TestSuspend runtime entrypoint so the thread can be
// suspended (e.g. for GC). Afterwards, control either returns to the point of
// the suspend check (`return_label_`) or jumps to an explicit successor block.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // `successor` may be null, in which case execution resumes at GetReturnLabel().
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ jmp(GetReturnLabel());
    } else {
      // Continue at the designated successor block.
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Only valid when no explicit successor was given.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
171
// Slow path throwing ArrayIndexOutOfBoundsException (or StringIndexOutOfBoundsException
// for String.charAt) with the offending index and length as arguments.
// This path never returns to the fast path.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // The length was never materialized in a register; load it from the
      // array object into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression) {
        // Strip the compression state from the loaded count to recover the
        // character length (assumes the flag occupies the low bit — see
        // mirror::String; TODO confirm).
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    // String.charAt gets its own entrypoint so the thrown exception type matches.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
230
// Slow path resolving a class (kQuickInitializeType) or resolving and
// initializing it (kQuickInitializeStaticStorage), then moving the result
// from RAX to the instruction's output location, if any.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  // `at` is the instruction this path services: either the HLoadClass itself
  // or an HClinitCheck that requires the class to be initialized.
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // Pass the type index as the single runtime argument.
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
            Immediate(cls_->GetTypeIndex().index_));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
290
// Slow path resolving a String via the runtime, then caching the result in
// the .bss entry so subsequent fast-path loads hit the cache.
class LoadStringSlowPathX86_64 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(string_index));
    x86_64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry. The absolute address is a
    // placeholder (kDummy32BitOffset); the bound fixup label lets the linker
    // patch in the real .bss slot address.
    __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
            locations->Out().AsRegister<CpuRegister>());
    Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
};
328
// Slow path for instanceof (kQuickInstanceofNonTrivial, returning the result
// in RAX) and checkcast (kQuickCheckInstanceOf, which throws on failure).
// When `is_fatal_` is true (checkcast that cannot be caught here), live
// registers are not saved and the path does not return.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    // For instanceof, the output register must not be live-saved (it is the result).
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // Move the boolean result out of RAX.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // True when a type-check failure unconditionally throws (no return to fast path).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
384
// Slow path transferring execution to the interpreter via the Deoptimize
// runtime entrypoint; control does not come back to the compiled code here.
class DeoptimizationSlowPathX86_64 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
};
402
// Slow path for an object array store that needs a runtime type check:
// marshals (array, index, value) into the calling convention registers and
// calls the AputObject entrypoint.
class ArraySetSlowPathX86_64 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // The three inputs may overlap the argument registers, so use a parallel
    // move to shuffle them safely.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
};
443
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only instructions known to embed a read barrier may reach this path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
527
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference, to compare against the
    // (possibly different) reference returned by the mark entrypoint.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it will be overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch register holding the old reference across the runtime call
  // (also reused as a spare register when heap poisoning is enabled).
  const CpuRegister temp1_;
  // Scratch register preserving RAX around the LOCK CMPXCHG sequence.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
699
Roland Levillain0d5a2812015-11-13 10:07:31 +0000700// Slow path generating a read barrier for a heap reference.
701class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
702 public:
703 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
704 Location out,
705 Location ref,
706 Location obj,
707 uint32_t offset,
708 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000709 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000710 out_(out),
711 ref_(ref),
712 obj_(obj),
713 offset_(offset),
714 index_(index) {
715 DCHECK(kEmitCompilerReadBarrier);
716 // If `obj` is equal to `out` or `ref`, it means the initial
717 // object has been overwritten by (or after) the heap object
718 // reference load to be instrumented, e.g.:
719 //
720 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000721 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000722 //
723 // In that case, we have lost the information about the original
724 // object, and the emitted read barrier cannot work properly.
725 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
726 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
727}
728
729 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
730 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
731 LocationSummary* locations = instruction_->GetLocations();
732 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
733 DCHECK(locations->CanCall());
734 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100735 DCHECK(instruction_->IsInstanceFieldGet() ||
736 instruction_->IsStaticFieldGet() ||
737 instruction_->IsArrayGet() ||
738 instruction_->IsInstanceOf() ||
739 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100740 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000741 << "Unexpected instruction in read barrier for heap reference slow path: "
742 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000743
744 __ Bind(GetEntryLabel());
745 SaveLiveRegisters(codegen, locations);
746
747 // We may have to change the index's value, but as `index_` is a
748 // constant member (like other "inputs" of this slow path),
749 // introduce a copy of it, `index`.
750 Location index = index_;
751 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100752 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000753 if (instruction_->IsArrayGet()) {
754 // Compute real offset and store it in index_.
755 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
756 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
757 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
758 // We are about to change the value of `index_reg` (see the
759 // calls to art::x86_64::X86_64Assembler::shll and
760 // art::x86_64::X86_64Assembler::AddImmediate below), but it
761 // has not been saved by the previous call to
762 // art::SlowPathCode::SaveLiveRegisters, as it is a
763 // callee-save register --
764 // art::SlowPathCode::SaveLiveRegisters does not consider
765 // callee-save registers, as it has been designed with the
766 // assumption that callee-save registers are supposed to be
767 // handled by the called function. So, as a callee-save
768 // register, `index_reg` _would_ eventually be saved onto
769 // the stack, but it would be too late: we would have
770 // changed its value earlier. Therefore, we manually save
771 // it here into another freely available register,
772 // `free_reg`, chosen of course among the caller-save
773 // registers (as a callee-save `free_reg` register would
774 // exhibit the same problem).
775 //
776 // Note we could have requested a temporary register from
777 // the register allocator instead; but we prefer not to, as
778 // this is a slow path, and we know we can find a
779 // caller-save register that is available.
780 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
781 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
782 index_reg = free_reg;
783 index = Location::RegisterLocation(index_reg);
784 } else {
785 // The initial register stored in `index_` has already been
786 // saved in the call to art::SlowPathCode::SaveLiveRegisters
787 // (as it is not a callee-save register), so we can freely
788 // use it.
789 }
790 // Shifting the index value contained in `index_reg` by the
791 // scale factor (2) cannot overflow in practice, as the
792 // runtime is unable to allocate object arrays with a size
793 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
794 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
795 static_assert(
796 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
797 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
798 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
799 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100800 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
801 // intrinsics, `index_` is not shifted by a scale factor of 2
802 // (as in the case of ArrayGet), as it is actually an offset
803 // to an object field within an object.
804 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000805 DCHECK(instruction_->GetLocations()->Intrinsified());
806 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
807 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
808 << instruction_->AsInvoke()->GetIntrinsic();
809 DCHECK_EQ(offset_, 0U);
810 DCHECK(index_.IsRegister());
811 }
812 }
813
814 // We're moving two or three locations to locations that could
815 // overlap, so we need a parallel move resolver.
816 InvokeRuntimeCallingConvention calling_convention;
817 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
818 parallel_move.AddMove(ref_,
819 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
820 Primitive::kPrimNot,
821 nullptr);
822 parallel_move.AddMove(obj_,
823 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
824 Primitive::kPrimNot,
825 nullptr);
826 if (index.IsValid()) {
827 parallel_move.AddMove(index,
828 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
829 Primitive::kPrimInt,
830 nullptr);
831 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
832 } else {
833 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
834 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
835 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100836 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000837 instruction_,
838 instruction_->GetDexPc(),
839 this);
840 CheckEntrypointTypes<
841 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
842 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
843
844 RestoreLiveRegisters(codegen, locations);
845 __ jmp(GetExitLabel());
846 }
847
848 const char* GetDescription() const OVERRIDE {
849 return "ReadBarrierForHeapReferenceSlowPathX86_64";
850 }
851
852 private:
853 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
854 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
855 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
856 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
857 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
858 return static_cast<CpuRegister>(i);
859 }
860 }
861 // We shall never fail to find a free caller-save register, as
862 // there are more than two core caller-save registers on x86-64
863 // (meaning it is possible to find one which is different from
864 // `ref` and `obj`).
865 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
866 LOG(FATAL) << "Could not find a free caller-save register";
867 UNREACHABLE();
868 }
869
Roland Levillain0d5a2812015-11-13 10:07:31 +0000870 const Location out_;
871 const Location ref_;
872 const Location obj_;
873 const uint32_t offset_;
874 // An additional location containing an index to an array.
875 // Only used for HArrayGet and the UnsafeGetObject &
876 // UnsafeGetObjectVolatile intrinsics.
877 const Location index_;
878
879 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
880};
881
882// Slow path generating a read barrier for a GC root.
883class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
884 public:
885 ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +0000886 : SlowPathCode(instruction), out_(out), root_(root) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000887 DCHECK(kEmitCompilerReadBarrier);
888 }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000889
890 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
891 LocationSummary* locations = instruction_->GetLocations();
892 DCHECK(locations->CanCall());
893 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000894 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
895 << "Unexpected instruction in read barrier for GC root slow path: "
896 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000897
898 __ Bind(GetEntryLabel());
899 SaveLiveRegisters(codegen, locations);
900
901 InvokeRuntimeCallingConvention calling_convention;
902 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
903 x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100904 x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000905 instruction_,
906 instruction_->GetDexPc(),
907 this);
908 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
909 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
910
911 RestoreLiveRegisters(codegen, locations);
912 __ jmp(GetExitLabel());
913 }
914
915 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }
916
917 private:
Roland Levillain0d5a2812015-11-13 10:07:31 +0000918 const Location out_;
919 const Location root_;
920
921 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
922};
923
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100924#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100925// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
926#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100927
Roland Levillain4fa13f62015-07-06 18:11:54 +0100928inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700929 switch (cond) {
930 case kCondEQ: return kEqual;
931 case kCondNE: return kNotEqual;
932 case kCondLT: return kLess;
933 case kCondLE: return kLessEqual;
934 case kCondGT: return kGreater;
935 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700936 case kCondB: return kBelow;
937 case kCondBE: return kBelowEqual;
938 case kCondA: return kAbove;
939 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700940 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100941 LOG(FATAL) << "Unreachable";
942 UNREACHABLE();
943}
944
Aart Bike9f37602015-10-09 11:15:55 -0700945// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100946inline Condition X86_64FPCondition(IfCondition cond) {
947 switch (cond) {
948 case kCondEQ: return kEqual;
949 case kCondNE: return kNotEqual;
950 case kCondLT: return kBelow;
951 case kCondLE: return kBelowEqual;
952 case kCondGT: return kAbove;
953 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700954 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100955 };
956 LOG(FATAL) << "Unreachable";
957 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700958}
959
Vladimir Markodc151b22015-10-15 18:02:30 +0100960HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
961 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100962 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Vladimir Markodc151b22015-10-15 18:02:30 +0100963 switch (desired_dispatch_info.code_ptr_location) {
964 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
965 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
966 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
967 return HInvokeStaticOrDirect::DispatchInfo {
968 desired_dispatch_info.method_load_kind,
969 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
970 desired_dispatch_info.method_load_data,
971 0u
972 };
973 default:
974 return desired_dispatch_info;
975 }
976}
977
// Materializes the callee for a static or direct call into `temp` (except for
// kRecursive, where the current method input is reused) and returns the
// location holding the callee method.
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // Thread-local value, addressed via the GS segment register.
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling itself: reuse the current method, already in a register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time.
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // The address is unknown until link time; emit a placeholder and
      // record a method patch for the linker.
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                   invoke->GetTargetMethod().dex_method_index);
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Load from the dex cache array via a PC-relative address patched at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      // Resolve the callee through the current method's dex cache.
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified call: reload the current method from the stack into `temp`.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
1035
// Emits a static or direct call: loads the callee method (see
// GenerateCalleeMethodStaticOrDirectCall) and issues the call according to
// the invoke's code pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: jump back to this method's own frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      // Record a relative-call patch; the linker rewrites the call target.
      relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
                                          invoke->GetTargetMethod().dex_method_index);
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
1068
// Emits a virtual call: loads the receiver's class, fetches the ArtMethod*
// from the class's embedded vtable at the invoke's vtable index, and calls
// its quick-compiled entrypoint.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The class load above may fault on a null receiver; record it as the
  // implicit null check for this invoke.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
1099
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001100void CodeGeneratorX86_64::RecordSimplePatch() {
1101 if (GetCompilerOptions().GetIncludePatchInformation()) {
1102 simple_patches_.emplace_back();
1103 __ Bind(&simple_patches_.back());
1104 }
1105}
1106
Vladimir Markoaad75c62016-10-03 08:46:48 +00001107void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
1108 DCHECK(GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001109 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001110 __ Bind(&string_patches_.back().label);
1111}
1112
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001113void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08001114 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001115 __ Bind(&type_patches_.back().label);
1116}
1117
Vladimir Markoaad75c62016-10-03 08:46:48 +00001118Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1119 DCHECK(!GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08001120 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001121 return &string_patches_.back().label;
1122}
1123
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001124Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
1125 uint32_t element_offset) {
1126 // Add a patch entry and return the label.
1127 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
1128 return &pc_relative_dex_cache_patches_.back().label;
1129}
1130
// The label points to the end of the "movl" or another instruction, but the
// literal offset for a method patch needs to point to the embedded constant,
// which occupies the last 4 bytes of that instruction.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1134
1135template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1136inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1137 const ArenaDeque<PatchInfo<Label>>& infos,
1138 ArenaVector<LinkerPatch>* linker_patches) {
1139 for (const PatchInfo<Label>& info : infos) {
1140 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1141 linker_patches->push_back(
1142 Factory(literal_offset, &info.dex_file, info.label.Position(), info.index));
1143 }
1144}
1145
// Collects all patches recorded during code generation into `linker_patches`
// for the OAT writer / linker to resolve. Must be called once, after assembly.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve up front so the pushes below do not reallocate.
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // Direct method pointer patches.
  for (const PatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
  }
  // PC-relative calls to other methods' code.
  for (const PatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  // Positions recorded so the linker knows about them, with no data to patch.
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  // String patches are .bss entries for app compilation, relative addresses
  // when compiling the boot image.
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
  } else {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
  }
  // These are always PC-relative, see GetSupportedLoadClassKind().
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
}
1180
// Prints the symbolic name of core register `reg` (debug / dump output).
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}
1184
// Prints the symbolic name of floating-point register `reg` (debug / dump output).
void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FloatRegister(reg);
}
1188
// Spills core register `reg_id` to the stack slot at `stack_index`;
// returns the number of bytes occupied (one 64-bit word).
size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}
1193
// Reloads core register `reg_id` from the stack slot at `stack_index`;
// returns the number of bytes read (one 64-bit word).
size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}
1198
// Spills XMM register `reg_id` (lower 64 bits, via movsd) to the stack slot
// at `stack_index`; returns the number of bytes occupied.
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  return kX86_64WordSize;
}
1203
// Reloads XMM register `reg_id` (lower 64 bits, via movsd) from the stack slot
// at `stack_index`; returns the number of bytes read.
size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}
1208
// Emits a call to the quick runtime entrypoint `entrypoint` and, when the
// entrypoint requires one, records a stack map at `dex_pc`.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // Entrypoints are reached through the per-thread (gs-based) entrypoint table.
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1219
// Emits a runtime call without recording a stack map. Only valid for calls
// where no PC info is needed; ValidateInvokeRuntimeWithoutRecordingPcInfo
// checks that precondition (debug builds).
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1226
// Emits an indirect call through the current Thread's entrypoint table,
// addressed via the gs segment at `entry_point_offset` (no RIP-relative form).
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1230
// x86-64 has no register pairs (64-bit registers hold longs directly).
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);

// Constructs the x86-64 code generator. The initializer list order must match
// the member declaration order in the header. All patch containers are
// arena-allocated alongside the graph.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
      : CodeGenerator(graph,
                      kNumberOfCpuRegisters,
                      kNumberOfFloatRegisters,
                      kNumberOfCpuRegisterPairs,
                      ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                          arraysize(kCoreCalleeSaves))
                          | (1 << kFakeReturnRegister),
                      ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                          arraysize(kFpuCalleeSaves)),
                      compiler_options,
                      stats),
        block_labels_(nullptr),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this),
        assembler_(graph->GetArena()),
        isa_features_(isa_features),
        constant_area_start_(0),
        method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Reserve the fake return-address register so the allocator never hands it out.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001266
// Constructs the per-instruction visitor; it shares the assembler owned by `codegen`.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1272
// Marks registers that must never be handed out by the register allocator.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP (scratch for stack-to-stack moves etc.).
  blocked_core_registers_[TMP] = true;
}
1280
// Maps an x86-64 core register to its DWARF register number for CFI records.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001284
// Maps an x86-64 XMM register to its DWARF register number for CFI records.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1288
// Emits the method prologue: implicit stack-overflow probe, callee-save core
// register pushes, stack adjustment, XMM callee-save spills, and storing the
// current ArtMethod*. Every stack change is mirrored in CFI records.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Implicit check: touch the far end of the reserved stack region; a fault
    // here is turned into a StackOverflowError, so record the PC.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push callee-save core registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame in one adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }
}
1337
// Emits the method epilogue, mirroring GenerateFrameEntry in reverse:
// XMM restores, stack readjustment, callee-save core register pops, ret.
// CFI state is remembered/restored so later code keeps the frame's CFA.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Restore callee-save XMM registers from their frame slots.
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Undo the frame allocation done in the prologue.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-save core registers, lowest index first (reverse of prologue).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1368
// Binds the label of `block` at the current assembler position.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1372
// Emits code to move a value from `source` to `destination`. Handles all
// combinations of core register, XMM register, (double) stack slot and
// constant. Stack-to-stack moves go through the reserved TMP register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // movd transfers the raw bits between XMM and core registers.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Load the constant's bit pattern: 32 bits for float, 64 bits otherwise.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 32-bit constants can be stored to memory directly as an immediate.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 64-bit constants cannot be stored as a single immediate; use a helper.
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: bounce through TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1452
// Materializes the 32-bit constant `value` (sign-extended to 64 bits) into
// the register described by `location`.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1457
// Type-agnostic move on x86-64: the destination kind fully determines the
// instruction, so `dst_type` is ignored and this forwards to Move().
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1462
1463void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1464 if (location.IsRegister()) {
1465 locations->AddTemp(location);
1466 } else {
1467 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1468 }
1469}
1470
// Emits the control flow for an unconditional jump (HGoto / HTryBoundary):
// inserts suspend checks on loop back edges and on method entry, and emits
// the jump only when the successor is not the next block in layout order.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the suspend check also performs the jump.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1490
// HGoto uses no operands and produces no value: no locations needed.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1494
// Emits the jump to the goto's single successor (with suspend-check handling).
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1498
// HTryBoundary is a control-flow marker with no operands: no locations needed.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1502
1503void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1504 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1505 if (!successor->IsExitBlock()) {
1506 HandleGoto(try_boundary, successor);
1507 }
1508}
1509
// HExit uses no operands and produces no value: no locations needed.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1513
// HExit generates no code; control never flows past it at runtime.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1516
// Emits the conditional jumps after a floating-point compare (ucomiss/ucomisd).
// NaN operands set the "unordered" flags, so the unordered case must be routed
// first, to whichever target the condition maps NaN to.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1528
// Emits the compare that sets the condition codes for `condition`, dispatching
// on the operand type: integral/reference, long, float or double. FP compares
// tolerate a register, constant (via the constant area) or stack-slot RHS.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1581
1582template<class LabelType>
1583void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1584 LabelType* true_target_in,
1585 LabelType* false_target_in) {
1586 // Generated branching requires both targets to be explicit. If either of the
1587 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1588 LabelType fallthrough_target;
1589 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1590 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1591
1592 // Generate the comparison to set the CC.
1593 GenerateCompareTest(condition);
1594
1595 // Now generate the correct jump(s).
1596 Primitive::Type type = condition->InputAt(0)->GetType();
1597 switch (type) {
1598 case Primitive::kPrimLong: {
1599 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1600 break;
1601 }
1602 case Primitive::kPrimFloat: {
1603 GenerateFPJumps(condition, true_target, false_target);
1604 break;
1605 }
1606 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001607 GenerateFPJumps(condition, true_target, false_target);
1608 break;
1609 }
1610 default:
1611 LOG(FATAL) << "Unexpected condition type " << type;
1612 }
1613
David Brazdil0debae72015-11-12 18:37:00 +00001614 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001615 __ jmp(false_target);
1616 }
David Brazdil0debae72015-11-12 18:37:00 +00001617
1618 if (fallthrough_target.IsLinked()) {
1619 __ Bind(&fallthrough_target);
1620 }
Mark Mendellc4701932015-04-10 13:18:51 -04001621}
1622
David Brazdil0debae72015-11-12 18:37:00 +00001623static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1624 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1625 // are set only strictly before `branch`. We can't use the eflags on long
1626 // conditions if they are materialized due to the complex branching.
1627 return cond->IsCondition() &&
1628 cond->GetNext() == branch &&
1629 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1630}
1631
// Emits the test-and-branch sequence for `instruction` whose condition is
// input `condition_input_index`. Either target may be nullptr, meaning that
// path falls through. Handles: constant conditions (unconditional jump or
// nothing), materialized/boolean conditions (reuse EFLAGS or compare against
// 0), and non-materialized conditions (emit the compare directly).
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The flags from the materializing compare are still live; branch on them.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1715
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001716void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001717 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1718 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001719 locations->SetInAt(0, Location::Any());
1720 }
1721}
1722
1723void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001724 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1725 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1726 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1727 nullptr : codegen_->GetLabelOf(true_successor);
1728 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1729 nullptr : codegen_->GetLabelOf(false_successor);
1730 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001731}
1732
1733void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
1734 LocationSummary* locations = new (GetGraph()->GetArena())
1735 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001736 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001737 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001738 locations->SetInAt(0, Location::Any());
1739 }
1740}
1741
// Branch to the deoptimization slow path when the condition (input 0) holds;
// with a null false_target the code falls through when it does not.
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1749
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001750static bool SelectCanUseCMOV(HSelect* select) {
1751 // There are no conditional move instructions for XMMs.
1752 if (Primitive::IsFloatingPointType(select->GetType())) {
1753 return false;
1754 }
1755
1756 // A FP condition doesn't generate the single CC that we need.
1757 HInstruction* condition = select->GetCondition();
1758 if (condition->IsCondition() &&
1759 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1760 return false;
1761 }
1762
1763 // We can generate a CMOV for this Select.
1764 return true;
1765}
1766
David Brazdil74eb1b22015-12-14 11:44:01 +00001767void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1768 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1769 if (Primitive::IsFloatingPointType(select->GetType())) {
1770 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001771 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001772 } else {
1773 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001774 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001775 if (select->InputAt(1)->IsConstant()) {
1776 locations->SetInAt(1, Location::RequiresRegister());
1777 } else {
1778 locations->SetInAt(1, Location::Any());
1779 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001780 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001781 locations->SetInAt(1, Location::Any());
1782 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001783 }
1784 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1785 locations->SetInAt(2, Location::RequiresRegister());
1786 }
1787 locations->SetOut(Location::SameAsFirstInput());
1788}
1789
// Lower HSelect either to a CMOV (GP values, non-FP condition) or to an
// explicit test-and-branch around a move. The output register starts out
// holding the "false" value (input 0, same-as-first-input) and is
// conditionally overwritten with the "true" value (input 1).
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    // Default branch condition: a non-zero boolean in the condition register.
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: redo the compare and use its condition code.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // The "true" value lives on the stack; CMOV with a memory source.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: skip the move of the "true" value when the condition is false.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1846
// HNativeDebugInfo has no operands and no result; an empty summary suffices.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}
1850
// Intentionally emits no code.
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1854
// Emit a single x86 nop instruction.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1858
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001859void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001860 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001861 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001862 // Handle the long/FP comparisons made in instruction simplification.
1863 switch (cond->InputAt(0)->GetType()) {
1864 case Primitive::kPrimLong:
1865 locations->SetInAt(0, Location::RequiresRegister());
1866 locations->SetInAt(1, Location::Any());
1867 break;
1868 case Primitive::kPrimFloat:
1869 case Primitive::kPrimDouble:
1870 locations->SetInAt(0, Location::RequiresFpuRegister());
1871 locations->SetInAt(1, Location::Any());
1872 break;
1873 default:
1874 locations->SetInAt(0, Location::RequiresRegister());
1875 locations->SetInAt(1, Location::Any());
1876 break;
1877 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001878 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001879 locations->SetOut(Location::RequiresRegister());
1880 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001881}
1882
// Materialize a condition into its output register as 0 or 1. Does nothing
// when the condition is emitted at its use site (the user regenerates the
// compare itself). Int/long cases use setcc directly; FP cases branch via
// GenerateFPJumps and then convert the two targets into 0/1.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      // Note: xorl must precede the compare, since it clobbers EFLAGS.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      // ucomiss compares lhs against a constant, a stack slot, or a register.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1952
// Boilerplate: every comparison visitor (==, !=, signed <, <=, >, >= and the
// unsigned below/above variants) delegates to the shared HandleCondition
// implementation above, for both location building and code generation.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2032
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002033void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002034 LocationSummary* locations =
2035 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00002036 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002037 case Primitive::kPrimBoolean:
2038 case Primitive::kPrimByte:
2039 case Primitive::kPrimShort:
2040 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002041 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00002042 case Primitive::kPrimLong: {
2043 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002044 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002045 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2046 break;
2047 }
2048 case Primitive::kPrimFloat:
2049 case Primitive::kPrimDouble: {
2050 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002051 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002052 locations->SetOut(Location::RequiresRegister());
2053 break;
2054 }
2055 default:
2056 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2057 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002058}
2059
// Lower HCompare to -1/0/1 in the output register: 0 on equality, 1 when the
// left operand is greater, -1 when it is less. For FP inputs an unordered
// (NaN) result goes to "greater" or "less" according to the compare's bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used below to detect the "less" outcome; FP compares override
  // it because ucomis{s,d} reports "below" through CF rather than SF/OF.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN: route to greater (gt bias) or less (lt bias).
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Equal: out = 0. Otherwise branch on less_cond, falling through to greater.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2128
// Constants are never materialized here: each constant gets a
// ConstantLocation, and its value is emitted directly at each use site, so
// the code-generation visitors are intentionally empty.
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2179
// A memory barrier has no operands and produces no value.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

// Emit the barrier of the requested kind via the codegen helper.
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2187
// A void return has no operands; it only tears down the frame.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
2195
2196void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002197 LocationSummary* locations =
2198 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002199 switch (ret->InputAt(0)->GetType()) {
2200 case Primitive::kPrimBoolean:
2201 case Primitive::kPrimByte:
2202 case Primitive::kPrimChar:
2203 case Primitive::kPrimShort:
2204 case Primitive::kPrimInt:
2205 case Primitive::kPrimNot:
2206 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002207 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002208 break;
2209
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002210 case Primitive::kPrimFloat:
2211 case Primitive::kPrimDouble:
Mark Mendell40741f32015-04-20 22:10:34 -04002212 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002213 break;
2214
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002215 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002216 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002217 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002218}
2219
// Emit the frame exit for a value-carrying return. In debug builds, first
// verify the register allocator honored the fixed return locations set up in
// LocationsBuilderX86_64::VisitReturn (RAX for integral/reference, XMM0 for FP).
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2245
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002246Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2247 switch (type) {
2248 case Primitive::kPrimBoolean:
2249 case Primitive::kPrimByte:
2250 case Primitive::kPrimChar:
2251 case Primitive::kPrimShort:
2252 case Primitive::kPrimInt:
2253 case Primitive::kPrimNot:
2254 case Primitive::kPrimLong:
2255 return Location::RegisterLocation(RAX);
2256
2257 case Primitive::kPrimVoid:
2258 return Location::NoLocation();
2259
2260 case Primitive::kPrimDouble:
2261 case Primitive::kPrimFloat:
2262 return Location::FpuRegisterLocation(XMM0);
2263 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002264
2265 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002266}
2267
// The callee ArtMethod* is always passed in the fixed method register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2271
// Assigns the location of the next argument of the given type, advancing the
// visitor's bookkeeping: gp_index_ counts GP registers handed out,
// float_index_ counts FP registers, and stack_index_ counts 32-bit stack
// slots (wide types consume two). `calling_convention` is a member describing
// the register/stack layout (declared outside this chunk).
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Spilled: the slot was already counted, so address it at index - 1.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;  // A long occupies two 32-bit stack slots.
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Note: consumes two GP indices when spilled to the stack.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;  // A double occupies two 32-bit stack slots.
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2327
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

// Unresolved invokes go through a runtime call rather than direct code.
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2338
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002339void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002340 // Explicit clinit checks triggered by static invokes must have been pruned by
2341 // art::PrepareForRegisterAllocation.
2342 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002343
Mark Mendellfb8d2792015-03-31 22:16:59 -04002344 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002345 if (intrinsic.TryDispatch(invoke)) {
2346 return;
2347 }
2348
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002349 HandleInvoke(invoke);
2350}
2351
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002352static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2353 if (invoke->GetLocations()->Intrinsified()) {
2354 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2355 intrinsic.Dispatch(invoke);
2356 return true;
2357 }
2358 return false;
2359}
2360
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002361void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002362 // Explicit clinit checks triggered by static invokes must have been pruned by
2363 // art::PrepareForRegisterAllocation.
2364 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002365
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002366 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2367 return;
2368 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002369
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002370 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002371 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002372 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002373 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002374}
2375
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002376void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002377 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002378 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002379}
2380
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002381void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002382 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002383 if (intrinsic.TryDispatch(invoke)) {
2384 return;
2385 }
2386
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002387 HandleInvoke(invoke);
2388}
2389
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002390void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002391 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2392 return;
2393 }
2394
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002395 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002396 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002397 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002398}
2399
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002400void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2401 HandleInvoke(invoke);
2402 // Add the hidden argument.
2403 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2404}
2405
// Emits an interface call: loads the receiver's class, indexes into its
// IMT (interface method table), and calls through the resolved ArtMethod's
// quick-compiled-code entry point, passing the dex method index as a hidden
// argument in RAX.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument (the invoked method's dex method index).
  // It is safe to do this here, as RAX won't be modified thereafter,
  // before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Byte offset of this interface method's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2451
Roland Levillain88cb1752014-10-20 16:36:47 +01002452void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2453 LocationSummary* locations =
2454 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2455 switch (neg->GetResultType()) {
2456 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002457 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002458 locations->SetInAt(0, Location::RequiresRegister());
2459 locations->SetOut(Location::SameAsFirstInput());
2460 break;
2461
Roland Levillain88cb1752014-10-20 16:36:47 +01002462 case Primitive::kPrimFloat:
2463 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002464 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002465 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002466 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002467 break;
2468
2469 default:
2470 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2471 }
2472}
2473
2474void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2475 LocationSummary* locations = neg->GetLocations();
2476 Location out = locations->Out();
2477 Location in = locations->InAt(0);
2478 switch (neg->GetResultType()) {
2479 case Primitive::kPrimInt:
2480 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002481 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002482 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002483 break;
2484
2485 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002486 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002487 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002488 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002489 break;
2490
Roland Levillain5368c212014-11-27 15:03:41 +00002491 case Primitive::kPrimFloat: {
2492 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002493 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002494 // Implement float negation with an exclusive or with value
2495 // 0x80000000 (mask for bit 31, representing the sign of a
2496 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002497 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002498 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002499 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002500 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002501
Roland Levillain5368c212014-11-27 15:03:41 +00002502 case Primitive::kPrimDouble: {
2503 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002504 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002505 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002506 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002507 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002508 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002509 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002510 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002511 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002512
2513 default:
2514 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2515 }
2516}
2517
Roland Levillaindff1f282014-11-05 14:15:05 +00002518void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2519 LocationSummary* locations =
2520 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2521 Primitive::Type result_type = conversion->GetResultType();
2522 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002523 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002524
David Brazdilb2bd1c52015-03-25 11:17:37 +00002525 // The Java language does not allow treating boolean as an integral type but
2526 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002527
Roland Levillaindff1f282014-11-05 14:15:05 +00002528 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002529 case Primitive::kPrimByte:
2530 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002531 case Primitive::kPrimLong:
2532 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002533 case Primitive::kPrimBoolean:
2534 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002535 case Primitive::kPrimShort:
2536 case Primitive::kPrimInt:
2537 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002538 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002539 locations->SetInAt(0, Location::Any());
2540 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2541 break;
2542
2543 default:
2544 LOG(FATAL) << "Unexpected type conversion from " << input_type
2545 << " to " << result_type;
2546 }
2547 break;
2548
Roland Levillain01a8d712014-11-14 16:27:39 +00002549 case Primitive::kPrimShort:
2550 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002551 case Primitive::kPrimLong:
2552 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002553 case Primitive::kPrimBoolean:
2554 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002555 case Primitive::kPrimByte:
2556 case Primitive::kPrimInt:
2557 case Primitive::kPrimChar:
2558 // Processing a Dex `int-to-short' instruction.
2559 locations->SetInAt(0, Location::Any());
2560 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2561 break;
2562
2563 default:
2564 LOG(FATAL) << "Unexpected type conversion from " << input_type
2565 << " to " << result_type;
2566 }
2567 break;
2568
Roland Levillain946e1432014-11-11 17:35:19 +00002569 case Primitive::kPrimInt:
2570 switch (input_type) {
2571 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002572 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002573 locations->SetInAt(0, Location::Any());
2574 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2575 break;
2576
2577 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002578 // Processing a Dex `float-to-int' instruction.
2579 locations->SetInAt(0, Location::RequiresFpuRegister());
2580 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002581 break;
2582
Roland Levillain946e1432014-11-11 17:35:19 +00002583 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002584 // Processing a Dex `double-to-int' instruction.
2585 locations->SetInAt(0, Location::RequiresFpuRegister());
2586 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002587 break;
2588
2589 default:
2590 LOG(FATAL) << "Unexpected type conversion from " << input_type
2591 << " to " << result_type;
2592 }
2593 break;
2594
Roland Levillaindff1f282014-11-05 14:15:05 +00002595 case Primitive::kPrimLong:
2596 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002597 case Primitive::kPrimBoolean:
2598 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002599 case Primitive::kPrimByte:
2600 case Primitive::kPrimShort:
2601 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002602 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002603 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002604 // TODO: We would benefit from a (to-be-implemented)
2605 // Location::RegisterOrStackSlot requirement for this input.
2606 locations->SetInAt(0, Location::RequiresRegister());
2607 locations->SetOut(Location::RequiresRegister());
2608 break;
2609
2610 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002611 // Processing a Dex `float-to-long' instruction.
2612 locations->SetInAt(0, Location::RequiresFpuRegister());
2613 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002614 break;
2615
Roland Levillaindff1f282014-11-05 14:15:05 +00002616 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002617 // Processing a Dex `double-to-long' instruction.
2618 locations->SetInAt(0, Location::RequiresFpuRegister());
2619 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002620 break;
2621
2622 default:
2623 LOG(FATAL) << "Unexpected type conversion from " << input_type
2624 << " to " << result_type;
2625 }
2626 break;
2627
Roland Levillain981e4542014-11-14 11:47:14 +00002628 case Primitive::kPrimChar:
2629 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002630 case Primitive::kPrimLong:
2631 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002632 case Primitive::kPrimBoolean:
2633 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002634 case Primitive::kPrimByte:
2635 case Primitive::kPrimShort:
2636 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002637 // Processing a Dex `int-to-char' instruction.
2638 locations->SetInAt(0, Location::Any());
2639 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2640 break;
2641
2642 default:
2643 LOG(FATAL) << "Unexpected type conversion from " << input_type
2644 << " to " << result_type;
2645 }
2646 break;
2647
Roland Levillaindff1f282014-11-05 14:15:05 +00002648 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002649 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002650 case Primitive::kPrimBoolean:
2651 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002652 case Primitive::kPrimByte:
2653 case Primitive::kPrimShort:
2654 case Primitive::kPrimInt:
2655 case Primitive::kPrimChar:
2656 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002657 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002658 locations->SetOut(Location::RequiresFpuRegister());
2659 break;
2660
2661 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002662 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002663 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002664 locations->SetOut(Location::RequiresFpuRegister());
2665 break;
2666
Roland Levillaincff13742014-11-17 14:32:17 +00002667 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002668 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002669 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002670 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002671 break;
2672
2673 default:
2674 LOG(FATAL) << "Unexpected type conversion from " << input_type
2675 << " to " << result_type;
2676 };
2677 break;
2678
Roland Levillaindff1f282014-11-05 14:15:05 +00002679 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002680 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002681 case Primitive::kPrimBoolean:
2682 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002683 case Primitive::kPrimByte:
2684 case Primitive::kPrimShort:
2685 case Primitive::kPrimInt:
2686 case Primitive::kPrimChar:
2687 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002688 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002689 locations->SetOut(Location::RequiresFpuRegister());
2690 break;
2691
2692 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002693 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002694 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002695 locations->SetOut(Location::RequiresFpuRegister());
2696 break;
2697
Roland Levillaincff13742014-11-17 14:32:17 +00002698 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002699 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002700 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002701 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002702 break;
2703
2704 default:
2705 LOG(FATAL) << "Unexpected type conversion from " << input_type
2706 << " to " << result_type;
2707 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002708 break;
2709
2710 default:
2711 LOG(FATAL) << "Unexpected type conversion from " << input_type
2712 << " to " << result_type;
2713 }
2714}
2715
2716void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2717 LocationSummary* locations = conversion->GetLocations();
2718 Location out = locations->Out();
2719 Location in = locations->InAt(0);
2720 Primitive::Type result_type = conversion->GetResultType();
2721 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002722 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002723 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002724 case Primitive::kPrimByte:
2725 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002726 case Primitive::kPrimLong:
2727 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002728 case Primitive::kPrimBoolean:
2729 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002730 case Primitive::kPrimShort:
2731 case Primitive::kPrimInt:
2732 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002733 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002734 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002735 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002736 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002737 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002738 Address(CpuRegister(RSP), in.GetStackIndex()));
2739 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002740 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002741 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002742 }
2743 break;
2744
2745 default:
2746 LOG(FATAL) << "Unexpected type conversion from " << input_type
2747 << " to " << result_type;
2748 }
2749 break;
2750
Roland Levillain01a8d712014-11-14 16:27:39 +00002751 case Primitive::kPrimShort:
2752 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002753 case Primitive::kPrimLong:
2754 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002755 case Primitive::kPrimBoolean:
2756 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002757 case Primitive::kPrimByte:
2758 case Primitive::kPrimInt:
2759 case Primitive::kPrimChar:
2760 // Processing a Dex `int-to-short' instruction.
2761 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002762 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002763 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002764 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002765 Address(CpuRegister(RSP), in.GetStackIndex()));
2766 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002767 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002768 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002769 }
2770 break;
2771
2772 default:
2773 LOG(FATAL) << "Unexpected type conversion from " << input_type
2774 << " to " << result_type;
2775 }
2776 break;
2777
Roland Levillain946e1432014-11-11 17:35:19 +00002778 case Primitive::kPrimInt:
2779 switch (input_type) {
2780 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002781 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002782 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002783 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002784 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002785 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002786 Address(CpuRegister(RSP), in.GetStackIndex()));
2787 } else {
2788 DCHECK(in.IsConstant());
2789 DCHECK(in.GetConstant()->IsLongConstant());
2790 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002791 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002792 }
2793 break;
2794
Roland Levillain3f8f9362014-12-02 17:45:01 +00002795 case Primitive::kPrimFloat: {
2796 // Processing a Dex `float-to-int' instruction.
2797 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2798 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002799 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002800
2801 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002802 // if input >= (float)INT_MAX goto done
2803 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002804 __ j(kAboveEqual, &done);
2805 // if input == NaN goto nan
2806 __ j(kUnordered, &nan);
2807 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002808 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002809 __ jmp(&done);
2810 __ Bind(&nan);
2811 // output = 0
2812 __ xorl(output, output);
2813 __ Bind(&done);
2814 break;
2815 }
2816
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002817 case Primitive::kPrimDouble: {
2818 // Processing a Dex `double-to-int' instruction.
2819 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2820 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002821 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002822
2823 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002824 // if input >= (double)INT_MAX goto done
2825 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002826 __ j(kAboveEqual, &done);
2827 // if input == NaN goto nan
2828 __ j(kUnordered, &nan);
2829 // output = double-to-int-truncate(input)
2830 __ cvttsd2si(output, input);
2831 __ jmp(&done);
2832 __ Bind(&nan);
2833 // output = 0
2834 __ xorl(output, output);
2835 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002836 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002837 }
Roland Levillain946e1432014-11-11 17:35:19 +00002838
2839 default:
2840 LOG(FATAL) << "Unexpected type conversion from " << input_type
2841 << " to " << result_type;
2842 }
2843 break;
2844
Roland Levillaindff1f282014-11-05 14:15:05 +00002845 case Primitive::kPrimLong:
2846 switch (input_type) {
2847 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002848 case Primitive::kPrimBoolean:
2849 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002850 case Primitive::kPrimByte:
2851 case Primitive::kPrimShort:
2852 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002853 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002854 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002855 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002856 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002857 break;
2858
Roland Levillain624279f2014-12-04 11:54:28 +00002859 case Primitive::kPrimFloat: {
2860 // Processing a Dex `float-to-long' instruction.
2861 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2862 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002863 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002864
Mark Mendell92e83bf2015-05-07 11:25:03 -04002865 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002866 // if input >= (float)LONG_MAX goto done
2867 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002868 __ j(kAboveEqual, &done);
2869 // if input == NaN goto nan
2870 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002871 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002872 __ cvttss2si(output, input, true);
2873 __ jmp(&done);
2874 __ Bind(&nan);
2875 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002876 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002877 __ Bind(&done);
2878 break;
2879 }
2880
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002881 case Primitive::kPrimDouble: {
2882 // Processing a Dex `double-to-long' instruction.
2883 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2884 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002885 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002886
Mark Mendell92e83bf2015-05-07 11:25:03 -04002887 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002888 // if input >= (double)LONG_MAX goto done
2889 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002890 __ j(kAboveEqual, &done);
2891 // if input == NaN goto nan
2892 __ j(kUnordered, &nan);
2893 // output = double-to-long-truncate(input)
2894 __ cvttsd2si(output, input, true);
2895 __ jmp(&done);
2896 __ Bind(&nan);
2897 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002898 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002899 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002900 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002901 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002902
2903 default:
2904 LOG(FATAL) << "Unexpected type conversion from " << input_type
2905 << " to " << result_type;
2906 }
2907 break;
2908
Roland Levillain981e4542014-11-14 11:47:14 +00002909 case Primitive::kPrimChar:
2910 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002911 case Primitive::kPrimLong:
2912 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002913 case Primitive::kPrimBoolean:
2914 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002915 case Primitive::kPrimByte:
2916 case Primitive::kPrimShort:
2917 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002918 // Processing a Dex `int-to-char' instruction.
2919 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002920 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002921 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002922 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002923 Address(CpuRegister(RSP), in.GetStackIndex()));
2924 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002925 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002926 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002927 }
2928 break;
2929
2930 default:
2931 LOG(FATAL) << "Unexpected type conversion from " << input_type
2932 << " to " << result_type;
2933 }
2934 break;
2935
Roland Levillaindff1f282014-11-05 14:15:05 +00002936 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002937 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002938 case Primitive::kPrimBoolean:
2939 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002940 case Primitive::kPrimByte:
2941 case Primitive::kPrimShort:
2942 case Primitive::kPrimInt:
2943 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002944 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002945 if (in.IsRegister()) {
2946 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2947 } else if (in.IsConstant()) {
2948 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2949 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002950 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002951 } else {
2952 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2953 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2954 }
Roland Levillaincff13742014-11-17 14:32:17 +00002955 break;
2956
2957 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002958 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002959 if (in.IsRegister()) {
2960 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2961 } else if (in.IsConstant()) {
2962 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2963 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002964 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002965 } else {
2966 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2967 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2968 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002969 break;
2970
Roland Levillaincff13742014-11-17 14:32:17 +00002971 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002972 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002973 if (in.IsFpuRegister()) {
2974 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2975 } else if (in.IsConstant()) {
2976 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2977 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002978 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002979 } else {
2980 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2981 Address(CpuRegister(RSP), in.GetStackIndex()));
2982 }
Roland Levillaincff13742014-11-17 14:32:17 +00002983 break;
2984
2985 default:
2986 LOG(FATAL) << "Unexpected type conversion from " << input_type
2987 << " to " << result_type;
2988 };
2989 break;
2990
Roland Levillaindff1f282014-11-05 14:15:05 +00002991 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002992 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002993 case Primitive::kPrimBoolean:
2994 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002995 case Primitive::kPrimByte:
2996 case Primitive::kPrimShort:
2997 case Primitive::kPrimInt:
2998 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002999 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003000 if (in.IsRegister()) {
3001 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3002 } else if (in.IsConstant()) {
3003 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3004 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003005 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003006 } else {
3007 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3008 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3009 }
Roland Levillaincff13742014-11-17 14:32:17 +00003010 break;
3011
3012 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00003013 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003014 if (in.IsRegister()) {
3015 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3016 } else if (in.IsConstant()) {
3017 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3018 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003019 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003020 } else {
3021 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3022 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3023 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003024 break;
3025
Roland Levillaincff13742014-11-17 14:32:17 +00003026 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00003027 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04003028 if (in.IsFpuRegister()) {
3029 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3030 } else if (in.IsConstant()) {
3031 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3032 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003033 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003034 } else {
3035 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3036 Address(CpuRegister(RSP), in.GetStackIndex()));
3037 }
Roland Levillaincff13742014-11-17 14:32:17 +00003038 break;
3039
3040 default:
3041 LOG(FATAL) << "Unexpected type conversion from " << input_type
3042 << " to " << result_type;
3043 };
Roland Levillaindff1f282014-11-05 14:15:05 +00003044 break;
3045
3046 default:
3047 LOG(FATAL) << "Unexpected type conversion from " << input_type
3048 << " to " << result_type;
3049 }
3050}
3051
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003052void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003053 LocationSummary* locations =
3054 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003055 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003056 case Primitive::kPrimInt: {
3057 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003058 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3059 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003060 break;
3061 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003062
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003063 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003064 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003065 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003066 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003067 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003068 break;
3069 }
3070
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003071 case Primitive::kPrimDouble:
3072 case Primitive::kPrimFloat: {
3073 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003074 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003075 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003076 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003077 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003078
3079 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003080 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003081 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003082}
3083
// Emits x86-64 code for an HAdd. Operand kinds (register / constant / stack
// slot) were fixed by LocationsBuilderX86_64::VisitAdd above.
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // Output aliases the first input: plain two-operand add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Output aliases the second input: addition commutes.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // All three registers distinct: lea computes first + second
          // without clobbering either input.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // out != first: fold the constant into a lea displacement.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand form: only legal when out == first.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Addition commutes, so add the first input into the output.
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Three distinct registers: use lea to avoid clobbering inputs.
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        // The locations builder only allowed constants fitting in 32 bits.
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // SSE add is destructive: result accumulates into `first` (== out).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Read the literal from the constant area (RIP-relative).
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same shape as the float case, with the 64-bit addsd forms.
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3175
3176void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003177 LocationSummary* locations =
3178 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003179 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003180 case Primitive::kPrimInt: {
3181 locations->SetInAt(0, Location::RequiresRegister());
3182 locations->SetInAt(1, Location::Any());
3183 locations->SetOut(Location::SameAsFirstInput());
3184 break;
3185 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003186 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003187 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003188 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003189 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003190 break;
3191 }
Calin Juravle11351682014-10-23 15:38:15 +01003192 case Primitive::kPrimFloat:
3193 case Primitive::kPrimDouble: {
3194 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003195 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003196 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003197 break;
Calin Juravle11351682014-10-23 15:38:15 +01003198 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003199 default:
Calin Juravle11351682014-10-23 15:38:15 +01003200 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003201 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003202}
3203
// Emits x86-64 code for an HSub. All forms are destructive: the result is
// computed in place in `first`, which the locations builder pinned to out.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Pick the subl form matching the operand kind: reg, imm, or memory.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        // The locations builder only allowed int32-representable constants.
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Read the literal from the constant area (RIP-relative).
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same shape as the float case, with the 64-bit subsd forms.
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3266
Calin Juravle34bacdf2014-10-07 20:23:36 +01003267void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3268 LocationSummary* locations =
3269 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3270 switch (mul->GetResultType()) {
3271 case Primitive::kPrimInt: {
3272 locations->SetInAt(0, Location::RequiresRegister());
3273 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003274 if (mul->InputAt(1)->IsIntConstant()) {
3275 // Can use 3 operand multiply.
3276 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3277 } else {
3278 locations->SetOut(Location::SameAsFirstInput());
3279 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003280 break;
3281 }
3282 case Primitive::kPrimLong: {
3283 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003284 locations->SetInAt(1, Location::Any());
3285 if (mul->InputAt(1)->IsLongConstant() &&
3286 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003287 // Can use 3 operand multiply.
3288 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3289 } else {
3290 locations->SetOut(Location::SameAsFirstInput());
3291 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003292 break;
3293 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003294 case Primitive::kPrimFloat:
3295 case Primitive::kPrimDouble: {
3296 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003297 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003298 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003299 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003300 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003301
3302 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003303 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003304 }
3305}
3306
// Emits x86-64 code for an HMul. Constant right-hand sides use the
// three-operand imul forms; all other forms are destructive in `first`.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test the HIR input
      // explicitly (not `second`) to avoid problems where the output may not
      // be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        // Three-operand form: out = first * imm, inputs untouched.
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to
      // avoid problems where the output may not be the same as the first
      // operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand imulq with a sign-extended 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Constant too wide for an immediate: have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // SSE multiply is destructive: result accumulates into first (== out).
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Read the literal from the constant area (RIP-relative).
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Same shape as the float case, with the 64-bit mulsd forms.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3390
// Pushes `source` onto the x87 FP stack, for use by GenerateRemFP.
//
// `temp_offset` is the RSP-relative offset of a scratch slot used to spill a
// register-resident value so that fld can load it from memory (the x87 stack
// can only be loaded from memory). `stack_adjustment` is how many bytes this
// function's caller has already pushed below the original stack slots, so
// that stack-slot indices can be rebased onto the current RSP.
// `is_float` selects the 32-bit (flds) vs 64-bit (fldl) load.
void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
                                                     uint32_t stack_adjustment, bool is_float) {
  if (source.IsStackSlot()) {
    DCHECK(is_float);
    // Value already lives in memory: load it directly, rebased past the
    // caller's stack adjustment.
    __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else if (source.IsDoubleStackSlot()) {
    DCHECK(!is_float);
    __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else {
    // Write the value to the temporary location on the stack and load to FP stack.
    if (is_float) {
      Location stack_temp = Location::StackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ flds(Address(CpuRegister(RSP), temp_offset));
    } else {
      Location stack_temp = Location::DoubleStackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ fldl(Address(CpuRegister(RSP), temp_offset));
    }
  }
}
3412
// Emits code for a floating-point HRem using the x87 fprem instruction
// (SSE has no remainder), then moves the result back into an XMM register.
// Clobbers RAX (FPU status word) and eflags.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if
  // needed, so that the dividend ends up on top (ST0) and the divisor in ST1,
  // as fprem expects.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize: fprem may only perform partial
  // argument reduction per invocation.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack (leaves the x87 stack balanced).
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3465
// Emits code for an integer HDiv/HRem whose divisor is the constant 1 or -1:
// the remainder is always 0, and the quotient is the numerator (negated for
// divisor -1). Avoids the general idiv sequence entirely.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt: {
      if (instruction->IsRem()) {
        // x % (+/-1) == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          __ negl(output_register);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (instruction->IsRem()) {
        // 32-bit xor suffices: writes to 32-bit registers zero-extend to the
        // full 64-bit register on x86-64, and is one byte shorter than xorq.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
3508
// Emits code for an integer HDiv whose divisor is a (positive or negative)
// power of two, using an arithmetic shift instead of idiv.
//
// A plain arithmetic shift rounds toward negative infinity, but Java
// division rounds toward zero; so for negative numerators we first add
// |divisor| - 1 (selected branchlessly via test + cmov) before shifting.
// A final negate handles negative divisors.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (|imm| - 1); the bias fits in a lea displacement.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    // Keep the biased value only for negative numerators.
    __ testl(numerator, numerator);
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // The bias may not fit a 32-bit immediate: materialize it, then add.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3552
// Emits code for an integral HDiv/HRem by an arbitrary non-trivial constant
// using "magic number" multiplication: the quotient is computed as the high
// half of (magic * numerator), optionally corrected and shifted; the remainder
// is then recovered as numerator - quotient * divisor. Register roles are
// fixed by the x86-64 imul convention: RAX holds the multiplicand, RDX:RAX
// receives the product.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // A temp preserves the original numerator, since the imul sequence below
  // clobbers both RAX and RDX. Its temp index depends on how VisitDiv/VisitRem
  // allocated the temps.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator before it is clobbered.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction terms required when the signs of imm and magic differ.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // Add 1 to the quotient if it is negative (round toward zero):
    // edx += (edx >> 31) computed via an unsigned shift of the sign bit.
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX (the output).
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      // Quotient goes to EAX (the output).
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // imulq's immediate form only takes 32-bit values; wider constants are
      // loaded from the constant area.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3663
Calin Juravlebacfec32014-11-14 15:54:36 +00003664void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3665 DCHECK(instruction->IsDiv() || instruction->IsRem());
3666 Primitive::Type type = instruction->GetResultType();
3667 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3668
3669 bool is_div = instruction->IsDiv();
3670 LocationSummary* locations = instruction->GetLocations();
3671
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003672 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3673 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003674
Roland Levillain271ab9c2014-11-27 15:23:57 +00003675 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003676 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003677
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003678 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003679 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003680
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003681 if (imm == 0) {
3682 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3683 } else if (imm == 1 || imm == -1) {
3684 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003685 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003686 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003687 } else {
3688 DCHECK(imm <= -2 || imm >= 2);
3689 GenerateDivRemWithAnyConstant(instruction);
3690 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003691 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003692 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003693 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003694 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003695 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003696
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003697 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3698 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3699 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3700 // so it's safe to just use negl instead of more complex comparisons.
3701 if (type == Primitive::kPrimInt) {
3702 __ cmpl(second_reg, Immediate(-1));
3703 __ j(kEqual, slow_path->GetEntryLabel());
3704 // edx:eax <- sign-extended of eax
3705 __ cdq();
3706 // eax = quotient, edx = remainder
3707 __ idivl(second_reg);
3708 } else {
3709 __ cmpq(second_reg, Immediate(-1));
3710 __ j(kEqual, slow_path->GetEntryLabel());
3711 // rdx:rax <- sign-extended of rax
3712 __ cqo();
3713 // rax = quotient, rdx = remainder
3714 __ idivq(second_reg);
3715 }
3716 __ Bind(slow_path->GetExitLabel());
3717 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003718}
3719
Calin Juravle7c4954d2014-10-28 16:57:40 +00003720void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3721 LocationSummary* locations =
3722 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3723 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003724 case Primitive::kPrimInt:
3725 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003726 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003727 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003728 locations->SetOut(Location::SameAsFirstInput());
3729 // Intel uses edx:eax as the dividend.
3730 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003731 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3732 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3733 // output and request another temp.
3734 if (div->InputAt(1)->IsConstant()) {
3735 locations->AddTemp(Location::RequiresRegister());
3736 }
Calin Juravled0d48522014-11-04 16:40:20 +00003737 break;
3738 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003739
Calin Juravle7c4954d2014-10-28 16:57:40 +00003740 case Primitive::kPrimFloat:
3741 case Primitive::kPrimDouble: {
3742 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003743 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003744 locations->SetOut(Location::SameAsFirstInput());
3745 break;
3746 }
3747
3748 default:
3749 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3750 }
3751}
3752
3753void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3754 LocationSummary* locations = div->GetLocations();
3755 Location first = locations->InAt(0);
3756 Location second = locations->InAt(1);
3757 DCHECK(first.Equals(locations->Out()));
3758
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003759 Primitive::Type type = div->GetResultType();
3760 switch (type) {
3761 case Primitive::kPrimInt:
3762 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003763 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003764 break;
3765 }
3766
Calin Juravle7c4954d2014-10-28 16:57:40 +00003767 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003768 if (second.IsFpuRegister()) {
3769 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3770 } else if (second.IsConstant()) {
3771 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003772 codegen_->LiteralFloatAddress(
3773 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003774 } else {
3775 DCHECK(second.IsStackSlot());
3776 __ divss(first.AsFpuRegister<XmmRegister>(),
3777 Address(CpuRegister(RSP), second.GetStackIndex()));
3778 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003779 break;
3780 }
3781
3782 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003783 if (second.IsFpuRegister()) {
3784 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3785 } else if (second.IsConstant()) {
3786 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003787 codegen_->LiteralDoubleAddress(
3788 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003789 } else {
3790 DCHECK(second.IsDoubleStackSlot());
3791 __ divsd(first.AsFpuRegister<XmmRegister>(),
3792 Address(CpuRegister(RSP), second.GetStackIndex()));
3793 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003794 break;
3795 }
3796
3797 default:
3798 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3799 }
3800}
3801
Calin Juravlebacfec32014-11-14 15:54:36 +00003802void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003803 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003804 LocationSummary* locations =
3805 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003806
3807 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003808 case Primitive::kPrimInt:
3809 case Primitive::kPrimLong: {
3810 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003811 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003812 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3813 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003814 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3815 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3816 // output and request another temp.
3817 if (rem->InputAt(1)->IsConstant()) {
3818 locations->AddTemp(Location::RequiresRegister());
3819 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003820 break;
3821 }
3822
3823 case Primitive::kPrimFloat:
3824 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003825 locations->SetInAt(0, Location::Any());
3826 locations->SetInAt(1, Location::Any());
3827 locations->SetOut(Location::RequiresFpuRegister());
3828 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003829 break;
3830 }
3831
3832 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003833 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003834 }
3835}
3836
3837void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3838 Primitive::Type type = rem->GetResultType();
3839 switch (type) {
3840 case Primitive::kPrimInt:
3841 case Primitive::kPrimLong: {
3842 GenerateDivRemIntegral(rem);
3843 break;
3844 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003845 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003846 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003847 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003848 break;
3849 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003850 default:
3851 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3852 }
3853}
3854
// DivZeroCheck throws ArithmeticException on a zero divisor; the throwing
// slow-path locations need no output, and the divisor may live anywhere
// (register, stack slot, or constant).
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3859
3860void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003861 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003862 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3863 codegen_->AddSlowPath(slow_path);
3864
3865 LocationSummary* locations = instruction->GetLocations();
3866 Location value = locations->InAt(0);
3867
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003868 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003869 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003870 case Primitive::kPrimByte:
3871 case Primitive::kPrimChar:
3872 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003873 case Primitive::kPrimInt: {
3874 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003875 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003876 __ j(kEqual, slow_path->GetEntryLabel());
3877 } else if (value.IsStackSlot()) {
3878 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3879 __ j(kEqual, slow_path->GetEntryLabel());
3880 } else {
3881 DCHECK(value.IsConstant()) << value;
3882 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003883 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003884 }
3885 }
3886 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003887 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003888 case Primitive::kPrimLong: {
3889 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003890 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003891 __ j(kEqual, slow_path->GetEntryLabel());
3892 } else if (value.IsDoubleStackSlot()) {
3893 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3894 __ j(kEqual, slow_path->GetEntryLabel());
3895 } else {
3896 DCHECK(value.IsConstant()) << value;
3897 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003898 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003899 }
3900 }
3901 break;
3902 }
3903 default:
3904 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003905 }
Calin Juravled0d48522014-11-04 16:40:20 +00003906}
3907
Calin Juravle9aec02f2014-11-18 23:06:35 +00003908void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3909 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3910
3911 LocationSummary* locations =
3912 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3913
3914 switch (op->GetResultType()) {
3915 case Primitive::kPrimInt:
3916 case Primitive::kPrimLong: {
3917 locations->SetInAt(0, Location::RequiresRegister());
3918 // The shift count needs to be in CL.
3919 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3920 locations->SetOut(Location::SameAsFirstInput());
3921 break;
3922 }
3923 default:
3924 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3925 }
3926}
3927
3928void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3929 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3930
3931 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003932 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003933 Location second = locations->InAt(1);
3934
3935 switch (op->GetResultType()) {
3936 case Primitive::kPrimInt: {
3937 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003938 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003939 if (op->IsShl()) {
3940 __ shll(first_reg, second_reg);
3941 } else if (op->IsShr()) {
3942 __ sarl(first_reg, second_reg);
3943 } else {
3944 __ shrl(first_reg, second_reg);
3945 }
3946 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003947 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003948 if (op->IsShl()) {
3949 __ shll(first_reg, imm);
3950 } else if (op->IsShr()) {
3951 __ sarl(first_reg, imm);
3952 } else {
3953 __ shrl(first_reg, imm);
3954 }
3955 }
3956 break;
3957 }
3958 case Primitive::kPrimLong: {
3959 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003960 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003961 if (op->IsShl()) {
3962 __ shlq(first_reg, second_reg);
3963 } else if (op->IsShr()) {
3964 __ sarq(first_reg, second_reg);
3965 } else {
3966 __ shrq(first_reg, second_reg);
3967 }
3968 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003969 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003970 if (op->IsShl()) {
3971 __ shlq(first_reg, imm);
3972 } else if (op->IsShr()) {
3973 __ sarq(first_reg, imm);
3974 } else {
3975 __ shrq(first_reg, imm);
3976 }
3977 }
3978 break;
3979 }
3980 default:
3981 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003982 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003983 }
3984}
3985
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003986void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3987 LocationSummary* locations =
3988 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3989
3990 switch (ror->GetResultType()) {
3991 case Primitive::kPrimInt:
3992 case Primitive::kPrimLong: {
3993 locations->SetInAt(0, Location::RequiresRegister());
3994 // The shift count needs to be in CL (unless it is a constant).
3995 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3996 locations->SetOut(Location::SameAsFirstInput());
3997 break;
3998 }
3999 default:
4000 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4001 UNREACHABLE();
4002 }
4003}
4004
4005void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
4006 LocationSummary* locations = ror->GetLocations();
4007 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
4008 Location second = locations->InAt(1);
4009
4010 switch (ror->GetResultType()) {
4011 case Primitive::kPrimInt:
4012 if (second.IsRegister()) {
4013 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4014 __ rorl(first_reg, second_reg);
4015 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004016 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004017 __ rorl(first_reg, imm);
4018 }
4019 break;
4020 case Primitive::kPrimLong:
4021 if (second.IsRegister()) {
4022 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4023 __ rorq(first_reg, second_reg);
4024 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004025 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004026 __ rorq(first_reg, imm);
4027 }
4028 break;
4029 default:
4030 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4031 UNREACHABLE();
4032 }
4033}
4034
// Shl shares its register constraints with Shr/UShr; see HandleShift.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4038
// Shl shares its code generation with Shr/UShr; see HandleShift.
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4042
// Shr shares its register constraints with Shl/UShr; see HandleShift.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4046
// Shr shares its code generation with Shl/UShr; see HandleShift.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4050
// UShr shares its register constraints with Shl/Shr; see HandleShift.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4054
// UShr shares its code generation with Shl/Shr; see HandleShift.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4058
// HNewInstance always calls into the runtime; the result comes back in RAX.
// String allocation goes through StringFactory and only needs the method
// register as a temp, while other types pass their two inputs in the runtime
// calling convention registers.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(Location::RegisterLocation(RAX));
}
4071
// Code generation for HNewInstance: either a direct call through the
// NewEmptyString entrypoint (for String allocation) or a regular runtime
// invoke using the instruction's allocation entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    // The entrypoint slot (read via the thread-local GS segment) holds an
    // ArtMethod*; the call goes through its quick-compiled-code pointer.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4088
// HNewArray always calls into the runtime; the result comes back in RAX.
// The first runtime argument register is a temp: the type index is
// materialized into it at code-generation time (see VisitNewArray below),
// while the instruction's two inputs occupy the next two argument registers.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
4098
// Code generation for HNewArray: load the type index into the first runtime
// argument register and invoke the instruction's allocation entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex().index_);
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
4110
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004111void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004112 LocationSummary* locations =
4113 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004114 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4115 if (location.IsStackSlot()) {
4116 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4117 } else if (location.IsDoubleStackSlot()) {
4118 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4119 }
4120 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004121}
4122
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4127
// HCurrentMethod is pinned to the register carrying the ArtMethod* argument.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4133
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4138
// HClassTableGet reads a method pointer out of a class's vtable or IMT; it
// needs the class in a register and produces a register.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
4145
// Code generation for HClassTableGet. For a vtable lookup the entry is
// embedded in the class object, so a single load suffices; for an IMT lookup
// the class holds a pointer to the table, requiring two loads (table pointer,
// then the entry at the method's offset).
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // Load the IMT pointer from the class, then the entry from the table.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4163
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004164void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004165 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004166 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004167 locations->SetInAt(0, Location::RequiresRegister());
4168 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004169}
4170
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004171void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4172 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004173 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4174 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004175 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004176 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004177 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004178 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004179 break;
4180
4181 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004182 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004183 break;
4184
4185 default:
4186 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4187 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004188}
4189
David Brazdil66d126e2015-04-03 16:02:44 +01004190void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4191 LocationSummary* locations =
4192 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4193 locations->SetInAt(0, Location::RequiresRegister());
4194 locations->SetOut(Location::SameAsFirstInput());
4195}
4196
4197void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004198 LocationSummary* locations = bool_not->GetLocations();
4199 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4200 locations->Out().AsRegister<CpuRegister>().AsRegister());
4201 Location out = locations->Out();
4202 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4203}
4204
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004205void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004206 LocationSummary* locations =
4207 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004208 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004209 locations->SetInAt(i, Location::Any());
4210 }
4211 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004212}
4213
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are resolved into moves before code generation (see the locations
  // builder above), so reaching this visitor indicates a compiler bug.
  LOG(FATAL) << "Unimplemented";
}
4217
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004218void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004219 /*
4220 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004221 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004222 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4223 */
4224 switch (kind) {
4225 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004226 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004227 break;
4228 }
4229 case MemBarrierKind::kAnyStore:
4230 case MemBarrierKind::kLoadAny:
4231 case MemBarrierKind::kStoreStore: {
4232 // nop
4233 break;
4234 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004235 case MemBarrierKind::kNTStoreStore:
4236 // Non-Temporal Store/Store needs an explicit fence.
4237 MemoryFence(/* non-temporal */ true);
4238 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004239 }
4240}
4241
4242void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4243 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4244
Roland Levillain0d5a2812015-11-13 10:07:31 +00004245 bool object_field_get_with_read_barrier =
4246 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004247 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004248 new (GetGraph()->GetArena()) LocationSummary(instruction,
4249 object_field_get_with_read_barrier ?
4250 LocationSummary::kCallOnSlowPath :
4251 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004252 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004253 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004254 }
Calin Juravle52c48962014-12-16 17:02:57 +00004255 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004256 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4257 locations->SetOut(Location::RequiresFpuRegister());
4258 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004259 // The output overlaps for an object field get when read barriers
4260 // are enabled: we do not want the move to overwrite the object's
4261 // location, as we need it to emit the read barrier.
4262 locations->SetOut(
4263 Location::RequiresRegister(),
4264 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004265 }
Calin Juravle52c48962014-12-16 17:02:57 +00004266}
4267
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  // Emits the load for an instance or static field get. `InAt(0)` holds the
  // base (the object, or the declaring Class for static fields). Implicit
  // null checks must be recorded on the instruction that performs the first
  // memory access, which is why reference loads record them inside the
  // switch and every other type records them after it.
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending 8-bit load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending 8-bit load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load (Java char is unsigned).
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // Volatile load: LoadAny barrier after the load (a scheduling
      // barrier only, on x86-64 -- see GenerateMemoryBarrier).
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4366
4367void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4368 const FieldInfo& field_info) {
4369 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4370
4371 LocationSummary* locations =
4372 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004373 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004374 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004375 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004376 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004377
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004378 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004379 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004380 if (is_volatile) {
4381 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4382 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4383 } else {
4384 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4385 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004386 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004387 if (is_volatile) {
4388 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4389 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4390 } else {
4391 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4392 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004393 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004394 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004395 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004396 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004397 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004398 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4399 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004400 locations->AddTemp(Location::RequiresRegister());
4401 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004402}
4403
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  // Emits the store for an instance or static field set, with volatile
  // barriers, heap-reference poisoning, and the GC card mark as needed.
  // Implicit null checks must be recorded on the instruction performing the
  // first memory access; MoveInt64ToAddress records them itself, which is
  // what `maybe_record_implicit_null_check_done` tracks.
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // AnyStore barrier before a volatile store (scheduling barrier only on
    // x86-64 -- see GenerateMemoryBarrier).
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temp so the value register is
          // left intact for the card mark below.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // May emit one 64-bit or two 32-bit stores depending on the
        // constant; records the implicit null check itself.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object so the concurrent GC sees the
    // newly-stored reference.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // AnyAny barrier after a volatile store: the only case needing a real
    // fence on x86-64.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4526
4527void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4528 HandleFieldSet(instruction, instruction->GetFieldInfo());
4529}
4530
4531void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004532 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004533}
4534
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field loads share one locations helper.
  HandleFieldGet(instruction);
}
4538
4539void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004540 HandleFieldGet(instruction, instruction->GetFieldInfo());
4541}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004542
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads use the same locations helper as instance loads.
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004546
Calin Juravle52c48962014-12-16 17:02:57 +00004547void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4548 HandleFieldGet(instruction, instruction->GetFieldInfo());
4549}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004550
Calin Juravle52c48962014-12-16 17:02:57 +00004551void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4552 HandleFieldSet(instruction, instruction->GetFieldInfo());
4553}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004554
Calin Juravle52c48962014-12-16 17:02:57 +00004555void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004556 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004557}
4558
Calin Juravlee460d1d2015-09-29 04:52:17 +01004559void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4560 HUnresolvedInstanceFieldGet* instruction) {
4561 FieldAccessCallingConventionX86_64 calling_convention;
4562 codegen_->CreateUnresolvedFieldLocationSummary(
4563 instruction, instruction->GetFieldType(), calling_convention);
4564}
4565
4566void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4567 HUnresolvedInstanceFieldGet* instruction) {
4568 FieldAccessCallingConventionX86_64 calling_convention;
4569 codegen_->GenerateUnresolvedFieldAccess(instruction,
4570 instruction->GetFieldType(),
4571 instruction->GetFieldIndex(),
4572 instruction->GetDexPc(),
4573 calling_convention);
4574}
4575
4576void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4577 HUnresolvedInstanceFieldSet* instruction) {
4578 FieldAccessCallingConventionX86_64 calling_convention;
4579 codegen_->CreateUnresolvedFieldLocationSummary(
4580 instruction, instruction->GetFieldType(), calling_convention);
4581}
4582
4583void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4584 HUnresolvedInstanceFieldSet* instruction) {
4585 FieldAccessCallingConventionX86_64 calling_convention;
4586 codegen_->GenerateUnresolvedFieldAccess(instruction,
4587 instruction->GetFieldType(),
4588 instruction->GetFieldIndex(),
4589 instruction->GetDexPc(),
4590 calling_convention);
4591}
4592
4593void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4594 HUnresolvedStaticFieldGet* instruction) {
4595 FieldAccessCallingConventionX86_64 calling_convention;
4596 codegen_->CreateUnresolvedFieldLocationSummary(
4597 instruction, instruction->GetFieldType(), calling_convention);
4598}
4599
4600void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4601 HUnresolvedStaticFieldGet* instruction) {
4602 FieldAccessCallingConventionX86_64 calling_convention;
4603 codegen_->GenerateUnresolvedFieldAccess(instruction,
4604 instruction->GetFieldType(),
4605 instruction->GetFieldIndex(),
4606 instruction->GetDexPc(),
4607 calling_convention);
4608}
4609
4610void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4611 HUnresolvedStaticFieldSet* instruction) {
4612 FieldAccessCallingConventionX86_64 calling_convention;
4613 codegen_->CreateUnresolvedFieldLocationSummary(
4614 instruction, instruction->GetFieldType(), calling_convention);
4615}
4616
4617void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4618 HUnresolvedStaticFieldSet* instruction) {
4619 FieldAccessCallingConventionX86_64 calling_convention;
4620 codegen_->GenerateUnresolvedFieldAccess(instruction,
4621 instruction->GetFieldType(),
4622 instruction->GetFieldIndex(),
4623 instruction->GetDexPc(),
4624 calling_convention);
4625}
4626
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004627void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004628 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4629 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4630 ? Location::RequiresRegister()
4631 : Location::Any();
4632 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004633}
4634
Calin Juravle2ae48182016-03-16 14:05:09 +00004635void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4636 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004637 return;
4638 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004639 LocationSummary* locations = instruction->GetLocations();
4640 Location obj = locations->InAt(0);
4641
4642 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004643 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004644}
4645
Calin Juravle2ae48182016-03-16 14:05:09 +00004646void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07004647 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004648 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004649
4650 LocationSummary* locations = instruction->GetLocations();
4651 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004652
4653 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004654 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004655 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004656 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004657 } else {
4658 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004659 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004660 __ jmp(slow_path->GetEntryLabel());
4661 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004662 }
4663 __ j(kEqual, slow_path->GetEntryLabel());
4664}
4665
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit (fault-based) or explicit (compare-and-branch)
  // null check depending on compiler options.
  codegen_->GenerateNullCheck(instruction);
}
4669
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004670void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004671 bool object_array_get_with_read_barrier =
4672 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004673 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004674 new (GetGraph()->GetArena()) LocationSummary(instruction,
4675 object_array_get_with_read_barrier ?
4676 LocationSummary::kCallOnSlowPath :
4677 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004678 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004679 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004680 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004681 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004682 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004683 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4684 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4685 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004686 // The output overlaps for an object array get when read barriers
4687 // are enabled: we do not want the move to overwrite the array's
4688 // location, as we need it to emit the read barrier.
4689 locations->SetOut(
4690 Location::RequiresRegister(),
4691 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004692 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004693}
4694
4695void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
4696 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004697 Location obj_loc = locations->InAt(0);
4698 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004699 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004700 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01004701 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004702
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004703 Primitive::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01004704 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004705 case Primitive::kPrimBoolean: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004706 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004707 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004708 break;
4709 }
4710
4711 case Primitive::kPrimByte: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004712 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004713 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004714 break;
4715 }
4716
4717 case Primitive::kPrimShort: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004718 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004719 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004720 break;
4721 }
4722
4723 case Primitive::kPrimChar: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004724 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07004725 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
4726 // Branch cases into compressed and uncompressed for each index's type.
4727 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
4728 NearLabel done, not_compressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004729 __ testl(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07004730 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01004731 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
4732 "Expecting 0=compressed, 1=uncompressed");
4733 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07004734 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
4735 __ jmp(&done);
4736 __ Bind(&not_compressed);
4737 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4738 __ Bind(&done);
4739 } else {
4740 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
4741 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004742 break;
4743 }
4744
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004745 case Primitive::kPrimInt: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004746 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004747 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004748 break;
4749 }
4750
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004751 case Primitive::kPrimNot: {
4752 static_assert(
4753 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4754 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004755 // /* HeapReference<Object> */ out =
4756 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4757 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004758 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01004759 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004760 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004761 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004762 } else {
4763 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004764 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
4765 codegen_->MaybeRecordImplicitNullCheck(instruction);
4766 // If read barriers are enabled, emit read barriers other than
4767 // Baker's using a slow path (and also unpoison the loaded
4768 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004769 if (index.IsConstant()) {
4770 uint32_t offset =
4771 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004772 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
4773 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004774 codegen_->MaybeGenerateReadBarrierSlow(
4775 instruction, out_loc, out_loc, obj_loc, data_offset, index);
4776 }
4777 }
4778 break;
4779 }
4780
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004781 case Primitive::kPrimLong: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004782 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004783 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004784 break;
4785 }
4786
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004787 case Primitive::kPrimFloat: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004788 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004789 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004790 break;
4791 }
4792
4793 case Primitive::kPrimDouble: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004794 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004795 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004796 break;
4797 }
4798
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004799 case Primitive::kPrimVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01004800 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004801 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004802 }
Roland Levillain4d027112015-07-01 15:41:14 +01004803
4804 if (type == Primitive::kPrimNot) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004805 // Potential implicit null checks, in the case of reference
4806 // arrays, are handled in the previous switch statement.
4807 } else {
4808 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01004809 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004810}
4811
4812void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004813 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004814
4815 bool needs_write_barrier =
4816 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004817 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004818
Nicolas Geoffray39468442014-09-02 15:17:15 +01004819 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004820 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004821 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004822 LocationSummary::kCallOnSlowPath :
4823 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004824
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004825 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004826 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4827 if (Primitive::IsFloatingPointType(value_type)) {
4828 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004829 } else {
4830 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4831 }
4832
4833 if (needs_write_barrier) {
4834 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004835 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004836 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004837 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004838}
4839
// Emits code for an HArraySet: stores `value` into `array` at `index`.
// Primitive stores are a single mov of the appropriate width; reference
// stores may additionally need a type check (taken on a slow path), heap
// reference poisoning, and a GC card mark acting as the write barrier.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Field offsets used by the reference type check in the kPrimNot case.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  // Each case computes the element address and emits the store; most record
  // an implicit null check right after the first memory access on `array`.
  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimNot: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.  Null needs no type check, no poisoning and no
        // card mark, so this is a plain 32-bit store.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      // A non-null reference store always needs the card-marking barrier.
      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null is always type-correct: store and skip the check.
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Exact match of component type and value class is enough; on a
          // mismatch, also accept when the value's superclass is Object
          // (i.e. the component type is java.lang.Object).
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // The actual store; poison the reference first when heap poisoning is
      // compiled in.
      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case Primitive::kPrimInt: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit constant may not fit an Immediate; MoveInt64ToAddress can
        // split the store into two 32-bit halves (hence the high-half
        // address), and it takes over the null-check recording.
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        // Store the float's bit pattern as a 32-bit integer immediate.
        DCHECK(value.IsConstant());
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Same two-halves strategy as the kPrimLong constant path.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5042
5043void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005044 LocationSummary* locations =
5045 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005046 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005047 if (!instruction->IsEmittedAtUseSite()) {
5048 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5049 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005050}
5051
5052void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005053 if (instruction->IsEmittedAtUseSite()) {
5054 return;
5055 }
5056
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005057 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005058 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005059 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5060 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005061 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005062 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005063 // Mask out most significant bit in case the array is String's array of char.
5064 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005065 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005066 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005067}
5068
5069void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005070 RegisterSet caller_saves = RegisterSet::Empty();
5071 InvokeRuntimeCallingConvention calling_convention;
5072 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5073 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5074 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005075 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005076 HInstruction* length = instruction->InputAt(1);
5077 if (!length->IsEmittedAtUseSite()) {
5078 locations->SetInAt(1, Location::RegisterOrConstant(length));
5079 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005080}
5081
// Emits the bounds check for an array access: when `index` is not within
// [0, length), control transfers to BoundsCheckSlowPathX86_64 (which throws).
// Unsigned branch conditions are used so that a negative index is caught by
// the same branch as an index >= length.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: jump unconditionally to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned "above or equal" also traps a negative index.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.  The ArrayLength was folded
      // into this check, so compare against the in-memory length directly.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        // The stored count carries the compression flag in its low bit; shift
        // it out in TMP before comparing.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // The comparisons above are length vs. index, so out-of-bounds means
    // length <= index (unsigned).
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5142
// GC write barrier: marks the card covering `object` as dirty in the current
// thread's card table, recording that a reference (`value`) was stored into
// `object`.  `temp` and `card` are scratch registers and are clobbered.  When
// `value_can_be_null` is true, the mark is skipped entirely for null stores
// (storing null creates no reference to track).
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the card table base from the Thread object (GS-relative access).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // Card index = object address >> kCardShift.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Dirty the card by storing the low byte of `card`; the card table base is
  // expected to be biased so this byte equals the dirty-card value (see
  // gc::accounting::CardTable).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5162
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves carry no locations of their own: their code is produced by
  // the parallel move resolver (see the InstructionCodeGenerator visitor), so
  // reaching this visitor indicates a bug.
  LOG(FATAL) << "Unimplemented";
}
5166
5167void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005168 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5169}
5170
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005171void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005172 LocationSummary* locations =
5173 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005174 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005175}
5176
5177void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005178 HBasicBlock* block = instruction->GetBlock();
5179 if (block->GetLoopInformation() != nullptr) {
5180 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5181 // The back edge will generate the suspend check.
5182 return;
5183 }
5184 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5185 // The goto will generate the suspend check.
5186 return;
5187 }
5188 GenerateSuspendCheck(instruction, nullptr);
5189}
5190
// Tests the current thread's flags word and, when any flag is set, branches
// to a lazily created (and cached on the instruction) suspend-check slow path.
//
// `successor` selects the emission shape:
//  - nullptr: fall through when no flags are set, jump to the slow path
//    otherwise, and bind the slow path's return label right here;
//  - non-null (a loop header): jump straight to `successor` when no flags are
//    set, otherwise fall into an unconditional jump to the slow path.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First emission for this instruction: create and register the slow path.
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // The slow path is shared across emissions of this check; it must target
    // the same successor every time.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Compare the 16-bit thread-flags field (GS-relative) against zero.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5218
// The move resolver shares the code generator's assembler so that resolved
// moves are emitted into the same instruction stream as the surrounding code.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5222
5223void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005224 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005225 Location source = move->GetSource();
5226 Location destination = move->GetDestination();
5227
5228 if (source.IsRegister()) {
5229 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005230 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005231 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005232 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005233 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005234 } else {
5235 DCHECK(destination.IsDoubleStackSlot());
5236 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005237 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005238 }
5239 } else if (source.IsStackSlot()) {
5240 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005241 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005242 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005243 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005244 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005245 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005246 } else {
5247 DCHECK(destination.IsStackSlot());
5248 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5249 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5250 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005251 } else if (source.IsDoubleStackSlot()) {
5252 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005253 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005254 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005255 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005256 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5257 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005258 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005259 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005260 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5261 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5262 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005263 } else if (source.IsConstant()) {
5264 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005265 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5266 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005267 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005268 if (value == 0) {
5269 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5270 } else {
5271 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5272 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005273 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005274 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005275 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005276 }
5277 } else if (constant->IsLongConstant()) {
5278 int64_t value = constant->AsLongConstant()->GetValue();
5279 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005280 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005281 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005282 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005283 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005284 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005285 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005286 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005287 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005288 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005289 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005290 } else {
5291 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005292 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005293 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5294 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005295 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005296 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005297 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005298 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005299 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005300 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005301 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005302 } else {
5303 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005304 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005305 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005306 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005307 } else if (source.IsFpuRegister()) {
5308 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005309 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005310 } else if (destination.IsStackSlot()) {
5311 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005312 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005313 } else {
Nicolas Geoffray31596742014-11-24 15:28:45 +00005314 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005315 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005316 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005317 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005318 }
5319}
5320
// Swaps a 32-bit value between a core register and a stack slot, using the
// reserved TMP register as the intermediate so no scratch allocation is
// needed.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP <- [RSP + mem]
  __ movl(Address(CpuRegister(RSP), mem), reg);               // [RSP + mem] <- reg
  __ movl(reg, CpuRegister(TMP));                             // reg <- old slot value
}
5326
// Swaps two 32-bit stack slots. Needs a second scratch register besides TMP;
// ScratchRegisterScope may spill one (pushing it), in which case all
// RSP-relative offsets must be shifted by one word.
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Account for the push performed by the scope when it had to spill.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5339
// Swaps the full 64-bit contents of two core registers through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5345
// Swaps a 64-bit value between a core register and a stack slot via TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP <- [RSP + mem]
  __ movq(Address(CpuRegister(RSP), mem), reg);               // [RSP + mem] <- reg
  __ movq(reg, CpuRegister(TMP));                             // reg <- old slot value
}
5351
// Swaps two 64-bit stack slots. Mirrors the 32-bit variant above: a second
// scratch register is obtained via ScratchRegisterScope, and offsets are
// adjusted by one word if that register had to be spilled (pushed).
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // Account for the push performed by the scope when it had to spill.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5364
// Swaps a 32-bit value between an XMM register and a stack slot: the slot is
// loaded into TMP, the XMM low word is stored to the slot, then TMP is moved
// into the XMM register.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5370
// Swaps a 64-bit value between an XMM register and a stack slot via TMP.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  // NOTE(review): this relies on the assembler's movd(XmmRegister, CpuRegister)
  // emitting the 64-bit (REX.W) form so the whole quadword is transferred —
  // confirm against the x86-64 assembler implementation.
  __ movd(reg, CpuRegister(TMP));
}
5376
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005377void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005378 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005379 Location source = move->GetSource();
5380 Location destination = move->GetDestination();
5381
5382 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell8a1c7282015-06-29 15:41:28 -04005383 Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005384 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005385 Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005386 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005387 Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005388 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005389 Exchange32(destination.GetStackIndex(), source.GetStackIndex());
5390 } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005391 Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005392 } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005393 Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005394 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
5395 Exchange64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005396 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005397 __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
5398 __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5399 __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005400 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005401 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005402 } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005403 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005404 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005405 Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005406 } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005407 Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005408 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005409 LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005410 }
5411}
5412
5413
// Spills a scratch core register by pushing it onto the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5417
5418
// Restores a previously spilled scratch core register by popping it.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5422
// Emits a check that the class in `class_reg` is initialized: compares its
// status field against kStatusInitialized and jumps to `slow_path` when the
// status is smaller (not yet initialized). The slow path's exit label is
// bound here so execution resumes after the check.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5431
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005432HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5433 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005434 switch (desired_class_load_kind) {
5435 case HLoadClass::LoadKind::kReferrersClass:
5436 break;
5437 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5438 DCHECK(!GetCompilerOptions().GetCompilePic());
5439 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5440 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5441 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5442 DCHECK(GetCompilerOptions().GetCompilePic());
5443 break;
5444 case HLoadClass::LoadKind::kBootImageAddress:
5445 break;
5446 case HLoadClass::LoadKind::kDexCacheAddress:
5447 DCHECK(Runtime::Current()->UseJitCompilation());
5448 break;
5449 case HLoadClass::LoadKind::kDexCachePcRelative:
5450 DCHECK(!Runtime::Current()->UseJitCompilation());
5451 break;
5452 case HLoadClass::LoadKind::kDexCacheViaMethod:
5453 break;
5454 }
5455 return desired_class_load_kind;
5456}
5457
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005458void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005459 if (cls->NeedsAccessCheck()) {
5460 InvokeRuntimeCallingConvention calling_convention;
5461 CodeGenerator::CreateLoadClassLocationSummary(
5462 cls,
5463 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5464 Location::RegisterLocation(RAX),
5465 /* code_generator_supports_read_barrier */ true);
5466 return;
5467 }
5468
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005469 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5470 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005471 ? LocationSummary::kCallOnSlowPath
5472 : LocationSummary::kNoCall;
5473 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005474 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005475 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005476 }
5477
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005478 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5479 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5480 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5481 locations->SetInAt(0, Location::RequiresRegister());
5482 }
5483 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005484}
5485
// Materializes the class referenced by `cls` into the output register
// according to the load kind chosen earlier, and emits the slow path(s) for
// resolution (null check) and/or class initialization when required.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Access check needed: resolve and verify via the runtime instead of an
    // inline load.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are accessed without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // RIP-relative lea with a dummy offset; the linker patches it later.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // /* GcRoot<mirror::Class> */ out = *address
      if (IsUint<32>(cls->GetAddress())) {
        // The dex cache slot fits in a 32-bit absolute address.
        Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                address,
                                /* fixup_label */ nullptr,
                                read_barrier_option);
      } else {
        // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
        // 64-bit address: materialize it in `out` first, then load through it.
        __ movq(out, Immediate(cls->GetAddress()));
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                Address(out, 0),
                                /* fixup_label */ nullptr,
                                read_barrier_option);
      }
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
          /* fixup_label */ nullptr,
          read_barrier_option);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Unresolved class: a null root sends us to the resolution slow path.
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5599
5600void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5601 LocationSummary* locations =
5602 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5603 locations->SetInAt(0, Location::RequiresRegister());
5604 if (check->HasUses()) {
5605 locations->SetOut(Location::SameAsFirstInput());
5606 }
5607}
5608
5609void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005610 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005611 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005612 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005613 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005614 GenerateClassInitializationCheck(slow_path,
5615 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005616}
5617
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005618HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5619 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005620 switch (desired_string_load_kind) {
5621 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5622 DCHECK(!GetCompilerOptions().GetCompilePic());
5623 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5624 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5625 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5626 DCHECK(GetCompilerOptions().GetCompilePic());
5627 break;
5628 case HLoadString::LoadKind::kBootImageAddress:
5629 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00005630 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005631 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005632 break;
5633 case HLoadString::LoadKind::kDexCacheViaMethod:
5634 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005635 case HLoadString::LoadKind::kJitTableAddress:
5636 DCHECK(Runtime::Current()->UseJitCompilation());
5637 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005638 }
5639 return desired_string_load_kind;
5640}
5641
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005642void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005643 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005644 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005645 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005646 locations->SetOut(Location::RegisterLocation(RAX));
5647 } else {
5648 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005649 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5650 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5651 // Rely on the pResolveString and/or marking to save everything.
5652 // Custom calling convention: RAX serves as both input and output.
5653 RegisterSet caller_saves = RegisterSet::Empty();
5654 caller_saves.Add(Location::RegisterLocation(RAX));
5655 locations->SetCustomSlowPathCallerSaves(caller_saves);
5656 } else {
5657 // For non-Baker read barrier we have a temp-clobbering call.
5658 }
5659 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005660 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005661}
5662
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005663Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
5664 dex::StringIndex dex_index) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005665 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index), /* placeholder */ 0u);
5666 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005667 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005668 PatchInfo<Label>* info = &jit_string_patches_.back();
5669 return &info->label;
5670}
5671
// Materializes the string referenced by `load` into the output register
// according to the load kind. Kinds without an inline fast path fall through
// to a runtime call to pResolveString.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // RIP-relative lea with a dummy offset; the linker patches it later.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootStringPatch(load);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      DCHECK_NE(load->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // A null BSS entry means the string is not resolved yet: go to the
      // resolution slow path.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootStringPatch(load->GetDexFile(), load->GetStringIndex());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5724
// Returns the GS-relative (thread-local) address of the current thread's
// pending-exception field.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip */ true);
}
5729
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005730void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5731 LocationSummary* locations =
5732 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5733 locations->SetOut(Location::RequiresRegister());
5734}
5735
// Loads the pending exception from the thread-local slot (GS-prefixed read).
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
5739
// HClearException needs no inputs, outputs, or runtime call — just a
// minimal LocationSummary.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5743
// Clears the thread-local pending-exception slot by storing 0 (null).
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5747
5748void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5749 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005750 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005751 InvokeRuntimeCallingConvention calling_convention;
5752 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5753}
5754
// Delegates exception delivery to the runtime's pDeliverException entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5759
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005760static bool CheckCastTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5761 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07005762 // We need a temporary for holding the iftable length.
5763 return true;
5764 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005765 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005766 !kUseBakerReadBarrier &&
5767 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005768 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5769 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5770}
5771
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005772static bool InstanceOfTypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5773 return kEmitCompilerReadBarrier &&
5774 !kUseBakerReadBarrier &&
5775 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5776 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5777 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5778}
5779
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005780void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005781 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005782 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005783 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005784 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005785 case TypeCheckKind::kExactCheck:
5786 case TypeCheckKind::kAbstractClassCheck:
5787 case TypeCheckKind::kClassHierarchyCheck:
5788 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005789 call_kind =
5790 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005791 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005792 break;
5793 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005794 case TypeCheckKind::kUnresolvedCheck:
5795 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005796 call_kind = LocationSummary::kCallOnSlowPath;
5797 break;
5798 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005799
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005800 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005801 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005802 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005803 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005804 locations->SetInAt(0, Location::RequiresRegister());
5805 locations->SetInAt(1, Location::Any());
5806 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5807 locations->SetOut(Location::RequiresRegister());
5808 // When read barriers are enabled, we need a temporary register for
5809 // some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005810 if (InstanceOfTypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005811 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005812 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005813}
5814
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005815void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005816 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005817 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005818 Location obj_loc = locations->InAt(0);
5819 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005820 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005821 Location out_loc = locations->Out();
5822 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00005823 Location maybe_temp_loc = InstanceOfTypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005824 locations->GetTemp(0) :
5825 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005826 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005827 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5828 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5829 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005830 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005831 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005832
5833 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005834 // Avoid null check if we know obj is not null.
5835 if (instruction->MustDoNullCheck()) {
5836 __ testl(obj, obj);
5837 __ j(kEqual, &zero);
5838 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005839
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005840 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005841 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005842 // /* HeapReference<Class> */ out = obj->klass_
5843 GenerateReferenceLoadTwoRegisters(instruction,
5844 out_loc,
5845 obj_loc,
5846 class_offset,
5847 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005848 if (cls.IsRegister()) {
5849 __ cmpl(out, cls.AsRegister<CpuRegister>());
5850 } else {
5851 DCHECK(cls.IsStackSlot()) << cls;
5852 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5853 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005854 if (zero.IsLinked()) {
5855 // Classes must be equal for the instanceof to succeed.
5856 __ j(kNotEqual, &zero);
5857 __ movl(out, Immediate(1));
5858 __ jmp(&done);
5859 } else {
5860 __ setcc(kEqual, out);
5861 // setcc only sets the low byte.
5862 __ andl(out, Immediate(1));
5863 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005864 break;
5865 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005866
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005867 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005868 // /* HeapReference<Class> */ out = obj->klass_
5869 GenerateReferenceLoadTwoRegisters(instruction,
5870 out_loc,
5871 obj_loc,
5872 class_offset,
5873 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005874 // If the class is abstract, we eagerly fetch the super class of the
5875 // object to avoid doing a comparison we know will fail.
5876 NearLabel loop, success;
5877 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005878 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005879 GenerateReferenceLoadOneRegister(instruction,
5880 out_loc,
5881 super_offset,
5882 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005883 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005884 __ testl(out, out);
5885 // If `out` is null, we use it for the result, and jump to `done`.
5886 __ j(kEqual, &done);
5887 if (cls.IsRegister()) {
5888 __ cmpl(out, cls.AsRegister<CpuRegister>());
5889 } else {
5890 DCHECK(cls.IsStackSlot()) << cls;
5891 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5892 }
5893 __ j(kNotEqual, &loop);
5894 __ movl(out, Immediate(1));
5895 if (zero.IsLinked()) {
5896 __ jmp(&done);
5897 }
5898 break;
5899 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005900
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005901 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005902 // /* HeapReference<Class> */ out = obj->klass_
5903 GenerateReferenceLoadTwoRegisters(instruction,
5904 out_loc,
5905 obj_loc,
5906 class_offset,
5907 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005908 // Walk over the class hierarchy to find a match.
5909 NearLabel loop, success;
5910 __ Bind(&loop);
5911 if (cls.IsRegister()) {
5912 __ cmpl(out, cls.AsRegister<CpuRegister>());
5913 } else {
5914 DCHECK(cls.IsStackSlot()) << cls;
5915 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5916 }
5917 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005918 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005919 GenerateReferenceLoadOneRegister(instruction,
5920 out_loc,
5921 super_offset,
5922 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005923 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005924 __ testl(out, out);
5925 __ j(kNotEqual, &loop);
5926 // If `out` is null, we use it for the result, and jump to `done`.
5927 __ jmp(&done);
5928 __ Bind(&success);
5929 __ movl(out, Immediate(1));
5930 if (zero.IsLinked()) {
5931 __ jmp(&done);
5932 }
5933 break;
5934 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005935
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005936 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005937 // /* HeapReference<Class> */ out = obj->klass_
5938 GenerateReferenceLoadTwoRegisters(instruction,
5939 out_loc,
5940 obj_loc,
5941 class_offset,
5942 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005943 // Do an exact check.
5944 NearLabel exact_check;
5945 if (cls.IsRegister()) {
5946 __ cmpl(out, cls.AsRegister<CpuRegister>());
5947 } else {
5948 DCHECK(cls.IsStackSlot()) << cls;
5949 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5950 }
5951 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005952 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005953 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005954 GenerateReferenceLoadOneRegister(instruction,
5955 out_loc,
5956 component_offset,
5957 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005958 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005959 __ testl(out, out);
5960 // If `out` is null, we use it for the result, and jump to `done`.
5961 __ j(kEqual, &done);
5962 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5963 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005964 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005965 __ movl(out, Immediate(1));
5966 __ jmp(&done);
5967 break;
5968 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005969
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005970 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08005971 // No read barrier since the slow path will retry upon failure.
5972 // /* HeapReference<Class> */ out = obj->klass_
5973 GenerateReferenceLoadTwoRegisters(instruction,
5974 out_loc,
5975 obj_loc,
5976 class_offset,
5977 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005978 if (cls.IsRegister()) {
5979 __ cmpl(out, cls.AsRegister<CpuRegister>());
5980 } else {
5981 DCHECK(cls.IsStackSlot()) << cls;
5982 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5983 }
5984 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005985 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5986 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005987 codegen_->AddSlowPath(slow_path);
5988 __ j(kNotEqual, slow_path->GetEntryLabel());
5989 __ movl(out, Immediate(1));
5990 if (zero.IsLinked()) {
5991 __ jmp(&done);
5992 }
5993 break;
5994 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005995
Calin Juravle98893e12015-10-02 21:05:03 +01005996 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005997 case TypeCheckKind::kInterfaceCheck: {
5998 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005999 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006000 // cases.
6001 //
6002 // We cannot directly call the InstanceofNonTrivial runtime
6003 // entry point without resorting to a type checking slow path
6004 // here (i.e. by calling InvokeRuntime directly), as it would
6005 // require to assign fixed registers for the inputs of this
6006 // HInstanceOf instruction (following the runtime calling
6007 // convention), which might be cluttered by the potential first
6008 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006009 //
6010 // TODO: Introduce a new runtime entry point taking the object
6011 // to test (instead of its class) as argument, and let it deal
6012 // with the read barrier issues. This will let us refactor this
6013 // case of the `switch` code as it was previously (with a direct
6014 // call to the runtime not using a type checking slow path).
6015 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006016 DCHECK(locations->OnlyCallsOnSlowPath());
6017 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
6018 /* is_fatal */ false);
6019 codegen_->AddSlowPath(slow_path);
6020 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006021 if (zero.IsLinked()) {
6022 __ jmp(&done);
6023 }
6024 break;
6025 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006026 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006027
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006028 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006029 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006030 __ xorl(out, out);
6031 }
6032
6033 if (done.IsLinked()) {
6034 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006035 }
6036
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006037 if (slow_path != nullptr) {
6038 __ Bind(slow_path->GetExitLabel());
6039 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006040}
6041
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006042static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006043 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006044 case TypeCheckKind::kExactCheck:
6045 case TypeCheckKind::kAbstractClassCheck:
6046 case TypeCheckKind::kClassHierarchyCheck:
6047 case TypeCheckKind::kArrayObjectCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006048 return !throws_into_catch && !kEmitCompilerReadBarrier;
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006049 case TypeCheckKind::kInterfaceCheck:
6050 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006051 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006052 case TypeCheckKind::kUnresolvedCheck:
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006053 return false;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006054 }
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006055 LOG(FATAL) << "Unreachable";
6056 UNREACHABLE();
6057}
6058
6059void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
6060 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
6061 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6062 bool is_fatal_slow_path = IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch);
6063 LocationSummary::CallKind call_kind = is_fatal_slow_path
6064 ? LocationSummary::kNoCall
6065 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006066 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6067 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006068 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6069 // Require a register for the interface check since there is a loop that compares the class to
6070 // a memory address.
6071 locations->SetInAt(1, Location::RequiresRegister());
6072 } else {
6073 locations->SetInAt(1, Location::Any());
6074 }
6075
Roland Levillain0d5a2812015-11-13 10:07:31 +00006076 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
6077 locations->AddTemp(Location::RequiresRegister());
6078 // When read barriers are enabled, we need an additional temporary
6079 // register for some cases.
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006080 if (CheckCastTypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006081 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006082 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006083}
6084
// Generates code for an HCheckCast: loads the class of `obj` into `temp` and
// verifies it against `cls` according to the statically selected
// `type_check_kind`; on failure the code jumps to TypeCheckSlowPathX86_64,
// which either throws directly (fatal) or re-runs the full check (non-fatal,
// see IsTypeCheckSlowPathFatal).
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  // Second temporary, only allocated for some check kinds (see
  // CheckCastTypeCheckNeedsATemporary); also used as the iftable counter below.
  Location maybe_temp2_loc = CheckCastTypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(1) :
      Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal =
      IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
  SlowPathCode* type_check_slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  NearLabel done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      // Otherwise, jump to the slow path to throw the exception.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck:
      // Fast path for the interface check. We always go slow path for heap poisoning since
      // unpoisoning cls would require an extra temp.
      if (!kPoisonHeapReferences) {
        // Try to avoid read barriers to improve the fast path. We can not get false positives by
        // doing this.
        // /* HeapReference<Class> */ temp = obj->klass_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          obj_loc,
                                          class_offset,
                                          kWithoutReadBarrier);

        // /* HeapReference<Class> */ temp = temp->iftable_
        GenerateReferenceLoadTwoRegisters(instruction,
                                          temp_loc,
                                          temp_loc,
                                          iftable_offset,
                                          kWithoutReadBarrier);
        // Iftable is never null.
        __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
        // Loop through the iftable and check if any class matches.
        NearLabel start_loop;
        __ Bind(&start_loop);
        // Need to subtract first to handle the empty array case.
        // Step by 2 reference slots per iftable entry.
        __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
        __ j(kNegative, type_check_slow_path->GetEntryLabel());
        // Go to next interface if the classes do not match.
        __ cmpl(cls.AsRegister<CpuRegister>(),
                CodeGeneratorX86_64::ArrayAddress(temp,
                                                  maybe_temp2_loc,
                                                  TIMES_4,
                                                  object_array_data_offset));
        __ j(kNotEqual, &start_loop);  // Keep scanning while the classes differ; fall through on a match.
      } else {
        __ jmp(type_check_slow_path->GetEntryLabel());
      }
      break;
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
6308
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006309void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
6310 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006311 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006312 InvokeRuntimeCallingConvention calling_convention;
6313 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6314}
6315
6316void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006317 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006318 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006319 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006320 if (instruction->IsEnter()) {
6321 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6322 } else {
6323 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6324 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006325}
6326
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006327void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6328void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6329void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6330
6331void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6332 LocationSummary* locations =
6333 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6334 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6335 || instruction->GetResultType() == Primitive::kPrimLong);
6336 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006337 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006338 locations->SetOut(Location::SameAsFirstInput());
6339}
6340
// Emits the code for HAnd via the shared bitwise-operation handler.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}
6344
// Emits the code for HOr via the shared bitwise-operation handler.
void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
6348
// Emits the code for HXor via the shared bitwise-operation handler.
void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6352
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  // Emits and/or/xor for int and long operands. The register allocator has
  // placed the first input in the same register as the output (see
  // LocationsBuilderX86_64::HandleBitwiseOperation), so the operation is
  // emitted in two-operand x86 form: first <op>= second.
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      // 32-bit constants always fit in the instruction's immediate field.
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // The second operand lives on the stack; use a memory operand directly.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // and/or/xor cannot encode a 64-bit immediate; constants outside the
    // int32 range are loaded from the RIP-relative constant area instead.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6441
// Loads the heap reference at (`out` + `offset`) back into `out` itself,
// applying the requested read barrier strategy. `maybe_temp` must hold a
// register when the non-Baker slow-path barrier is used, because the
// original value of `out` has to survive the overwriting load.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6474
// Loads the heap reference at (`obj` + `offset`) into `out` (a register
// distinct from `obj`), applying the requested read barrier strategy.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6503
// Loads a GC root located at `address` into `root`, applying the requested
// read barrier strategy. When `fixup_label` is non-null it is bound
// immediately after the instruction that references `address`, so that a
// later patching pass can locate and fix up that instruction.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The 32-bit movl above is only correct if a compressed reference,
      // a GcRoot and an int32_t all share the same size.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Only take the mark slow path while the GC is in its marking phase
      // (tested via the thread-local is_gc_marking flag).
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6567
6568void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6569 Location ref,
6570 CpuRegister obj,
6571 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006572 bool needs_null_check) {
6573 DCHECK(kEmitCompilerReadBarrier);
6574 DCHECK(kUseBakerReadBarrier);
6575
6576 // /* HeapReference<Object> */ ref = *(obj + offset)
6577 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006578 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006579}
6580
6581void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6582 Location ref,
6583 CpuRegister obj,
6584 uint32_t data_offset,
6585 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006586 bool needs_null_check) {
6587 DCHECK(kEmitCompilerReadBarrier);
6588 DCHECK(kUseBakerReadBarrier);
6589
Roland Levillain3d312422016-06-23 13:53:42 +01006590 static_assert(
6591 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6592 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006593 // /* HeapReference<Object> */ ref =
6594 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006595 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006596 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006597}
6598
// Core Baker read barrier sequence: tests the object's read-barrier state,
// performs the reference load from `src` into `ref`, and dispatches to a
// mark slow path when the object is gray. With `always_update_field` set,
// the slow path also writes the marked reference back through `src`
// (requiring the `temp1`/`temp2` scratch registers).
// NOTE: the condition flags set by the testb below must survive untouched
// until the final conditional jump — do not insert flag-clobbering code in
// between.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above dereferences `obj`, so it doubles as the implicit
    // null check for the surrounding instruction.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6682
6683void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6684 Location out,
6685 Location ref,
6686 Location obj,
6687 uint32_t offset,
6688 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006689 DCHECK(kEmitCompilerReadBarrier);
6690
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006691 // Insert a slow path based read barrier *after* the reference load.
6692 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006693 // If heap poisoning is enabled, the unpoisoning of the loaded
6694 // reference will be carried out by the runtime within the slow
6695 // path.
6696 //
6697 // Note that `ref` currently does not get unpoisoned (when heap
6698 // poisoning is enabled), which is alright as the `ref` argument is
6699 // not used by the artReadBarrierSlow entry point.
6700 //
6701 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6702 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6703 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6704 AddSlowPath(slow_path);
6705
Roland Levillain0d5a2812015-11-13 10:07:31 +00006706 __ jmp(slow_path->GetEntryLabel());
6707 __ Bind(slow_path->GetExitLabel());
6708}
6709
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006710void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6711 Location out,
6712 Location ref,
6713 Location obj,
6714 uint32_t offset,
6715 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006716 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006717 // Baker's read barriers shall be handled by the fast path
6718 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6719 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006720 // If heap poisoning is enabled, unpoisoning will be taken care of
6721 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006722 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006723 } else if (kPoisonHeapReferences) {
6724 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6725 }
6726}
6727
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006728void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6729 Location out,
6730 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006731 DCHECK(kEmitCompilerReadBarrier);
6732
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006733 // Insert a slow path based read barrier *after* the GC root load.
6734 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006735 // Note that GC roots are not affected by heap poisoning, so we do
6736 // not need to do anything special for this here.
6737 SlowPathCode* slow_path =
6738 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6739 AddSlowPath(slow_path);
6740
Roland Levillain0d5a2812015-11-13 10:07:31 +00006741 __ jmp(slow_path->GetEntryLabel());
6742 __ Bind(slow_path->GetExitLabel());
6743}
6744
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor therefore indicates a compiler pipeline bug.
  LOG(FATAL) << "Unreachable";
}
6749
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor therefore indicates a compiler pipeline bug.
  LOG(FATAL) << "Unreachable";
}
6754
Mark Mendellfe57faa2015-09-18 09:26:15 -04006755// Simple implementation of packed switch - generate cascaded compare/jumps.
6756void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6757 LocationSummary* locations =
6758 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6759 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006760 locations->AddTemp(Location::RequiresRegister());
6761 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006762}
6763
// Emits a packed switch either as a cascade of compare/jump pairs (small
// switches, up to kPackedSwitchJumpTableThreshold entries) or as a
// RIP-relative jump table stored in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // A non-zero bias means values below lower_bound go to the default
      // block; the equal case targets the first successor. Subsequent
      // comparisons use signed kLess.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero bias, unsigned
      // kBelow also filters out negative inputs.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each cmp covers two consecutive cases: a "<" branch and an "==" branch.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump table variant below.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6844
Aart Bikc5d47542016-01-27 17:00:35 -08006845void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6846 if (value == 0) {
6847 __ xorl(dest, dest);
6848 } else {
6849 __ movl(dest, Immediate(value));
6850 }
6851}
6852
Mark Mendell92e83bf2015-05-07 11:25:03 -04006853void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6854 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006855 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006856 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006857 } else if (IsUint<32>(value)) {
6858 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006859 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6860 } else {
6861 __ movq(dest, Immediate(value));
6862 }
6863}
6864
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006865void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6866 if (value == 0) {
6867 __ xorps(dest, dest);
6868 } else {
6869 __ movss(dest, LiteralInt32Address(value));
6870 }
6871}
6872
6873void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6874 if (value == 0) {
6875 __ xorpd(dest, dest);
6876 } else {
6877 __ movsd(dest, LiteralInt64Address(value));
6878 }
6879}
6880
6881void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6882 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6883}
6884
6885void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6886 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6887}
6888
Aart Bika19616e2016-02-01 18:57:58 -08006889void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6890 if (value == 0) {
6891 __ testl(dest, dest);
6892 } else {
6893 __ cmpl(dest, Immediate(value));
6894 }
6895}
6896
6897void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6898 if (IsInt<32>(value)) {
6899 if (value == 0) {
6900 __ testq(dest, dest);
6901 } else {
6902 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6903 }
6904 } else {
6905 // Value won't fit in an int.
6906 __ cmpq(dest, LiteralInt64Address(value));
6907 }
6908}
6909
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006910void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6911 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07006912 GenerateIntCompare(lhs_reg, rhs);
6913}
6914
6915void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006916 if (rhs.IsConstant()) {
6917 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07006918 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006919 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07006920 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006921 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07006922 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006923 }
6924}
6925
6926void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6927 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6928 if (rhs.IsConstant()) {
6929 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6930 Compare64BitValue(lhs_reg, value);
6931 } else if (rhs.IsDoubleStackSlot()) {
6932 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6933 } else {
6934 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6935 }
6936}
6937
6938Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6939 Location index,
6940 ScaleFactor scale,
6941 uint32_t data_offset) {
6942 return index.IsConstant() ?
6943 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6944 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6945}
6946
Mark Mendellcfa410b2015-05-25 16:02:44 -04006947void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6948 DCHECK(dest.IsDoubleStackSlot());
6949 if (IsInt<32>(value)) {
6950 // Can move directly as an int32 constant.
6951 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6952 Immediate(static_cast<int32_t>(value)));
6953 } else {
6954 Load64BitValue(CpuRegister(TMP), value);
6955 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6956 }
6957}
6958
/**
 * Class to handle late fixup of offsets into constant area.
 * An instance is attached to an instruction whose last four bytes are a
 * RIP-relative displacement into the constant area; once the final layout
 * is known, Process() rewrites those bytes with the correct displacement.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  // `offset` is the position of the referenced datum within the constant
  // area (subclasses may pass a placeholder and call SetOffset() later).
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
6986
6987/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
6989 * constant area.
6990 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The constant-area offset is not known at construction time; -1 is a
  // placeholder that CreateJumpTable() replaces via SetOffset().
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table for `switch_instr_` to the assembler's constant
  // area: one int32 per successor, each holding the distance from the table
  // base to the target block's bound label.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};
7023
Mark Mendellf55c3e02015-03-26 21:07:46 -04007024void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7025 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007026 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007027 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7028 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007029 assembler->Align(4, 0);
7030 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007031
7032 // Populate any jump tables.
7033 for (auto jump_table : fixups_to_jump_tables_) {
7034 jump_table->CreateJumpTable();
7035 }
7036
7037 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007038 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007039 }
7040
7041 // And finish up.
7042 CodeGenerator::Finalize(allocator);
7043}
7044
Mark Mendellf55c3e02015-03-26 21:07:46 -04007045Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
7046 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7047 return Address::RIP(fixup);
7048}
7049
7050Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
7051 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7052 return Address::RIP(fixup);
7053}
7054
7055Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
7056 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7057 return Address::RIP(fixup);
7058}
7059
7060Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
7061 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7062 return Address::RIP(fixup);
7063}
7064
Andreas Gampe85b62f22015-09-09 13:15:38 -07007065// TODO: trg as memory.
7066void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
7067 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007068 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007069 return;
7070 }
7071
7072 DCHECK_NE(type, Primitive::kPrimVoid);
7073
7074 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7075 if (trg.Equals(return_loc)) {
7076 return;
7077 }
7078
7079 // Let the parallel move resolver take care of all of this.
7080 HParallelMove parallel_move(GetGraph()->GetArena());
7081 parallel_move.AddMove(return_loc, trg, type, nullptr);
7082 GetMoveResolver()->EmitNativeCode(&parallel_move);
7083}
7084
Mark Mendell9c86b482015-09-18 13:36:07 -04007085Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7086 // Create a fixup to be used to create and address the jump table.
7087 JumpTableRIPFixup* table_fixup =
7088 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7089
7090 // We have to populate the jump tables.
7091 fixups_to_jump_tables_.push_back(table_fixup);
7092 return Address::RIP(table_fixup);
7093}
7094
// Stores the 64-bit constant `v` to memory.  `addr_low`/`addr_high` address the
// low and high 32-bit halves of the destination.  If `instruction` may rely on
// an implicit null check, it is recorded on the first store, since that is the
// access that faults on a null base.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    // Fits in a sign-extended 32-bit immediate: a single 64-bit store suffices.
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Does not fit in a sign-extended imm32.  Do it in two 32-bit pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    // Record on the first access; the second store cannot be the faulting one.
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7112
// Patches each recorded string-literal load in JIT-compiled `code` with the
// 32-bit address of its root slot inside `roots_data`.
void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const PatchInfo<Label>& info : jit_string_patches_) {
    // Look up the index of this string's root in the roots table.
    const auto& it = jit_string_roots_.find(StringReference(&info.dex_file,
                                                            dex::StringIndex(info.index)));
    DCHECK(it != jit_string_roots_.end());
    size_t index_in_table = it->second;
    // The literal to patch sits at a fixed adjustment before the bound label.
    uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    // Absolute address of the GcRoot slot for this string.
    uintptr_t address =
        reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
    // The patch site is not guaranteed to be 4-byte aligned, so store through
    // an alignment-1 typedef to avoid undefined behavior on the write.
    typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
    reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
        dchecked_integral_cast<uint32_t>(address);
  }
}
7127
Roland Levillain4d027112015-07-01 15:41:14 +01007128#undef __
7129
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007130} // namespace x86_64
7131} // namespace art