blob: f9a3e429d7fe3a26e1f7c1378d4b53d94f5e3823 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Offset of the current ArtMethod* in the managed frame: it is spilled at the
// bottom of the frame, i.e. directly at the stack pointer.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (first integer argument register).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Core and floating-point registers preserved across calls in this backend.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition bit (bit 10) of the x87 FPU status word.
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
Andreas Gampe85b62f22015-09-09 13:15:38 -0700103class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000104 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000105 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
106 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000107
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000108 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Calin Juravled0d48522014-11-04 16:40:20 +0000109 __ Bind(GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000110 if (type_ == Primitive::kPrimInt) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000111 if (is_div_) {
112 __ negl(cpu_reg_);
113 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400114 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000115 }
116
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000117 } else {
118 DCHECK_EQ(Primitive::kPrimLong, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000119 if (is_div_) {
120 __ negq(cpu_reg_);
121 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400122 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000123 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000124 }
Calin Juravled0d48522014-11-04 16:40:20 +0000125 __ jmp(GetExitLabel());
126 }
127
Alexandre Rames9931f312015-06-19 14:47:01 +0100128 const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }
129
Calin Juravled0d48522014-11-04 16:40:20 +0000130 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000131 const CpuRegister cpu_reg_;
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000132 const Primitive::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 const bool is_div_;
134 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000135};
136
Andreas Gampe85b62f22015-09-09 13:15:38 -0700137class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000138 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100139 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000140 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000141
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000143 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000144 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100145 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000146 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100147 if (successor_ == nullptr) {
148 __ jmp(GetReturnLabel());
149 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000150 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100151 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 }
153
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 Label* GetReturnLabel() {
155 DCHECK(successor_ == nullptr);
156 return &return_label_;
157 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000158
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100159 HBasicBlock* GetSuccessor() const {
160 return successor_;
161 }
162
Alexandre Rames9931f312015-06-19 14:47:01 +0100163 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
164
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000165 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100166 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000167 Label return_label_;
168
169 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
170};
171
// Slow path throwing an out-of-bounds error (array or string variant) via the
// runtime. Handles re-materializing an array length that was folded into its
// use site rather than kept in a register.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
    : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // The length has no location of its own: load it from the array object
      // into our temporary (argument register 1).
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    // String.charAt bounds failures use a dedicated entrypoint so the thrown
    // exception type matches the String contract.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // The entrypoint is chosen at code-generation time, so check both
    // signatures (they agree: (index, length) -> throws).
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The runtime call throws; control never falls back into this compiled code.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
227
Andreas Gampe85b62f22015-09-09 13:15:38 -0700228class LoadClassSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100229 public:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000230 LoadClassSlowPathX86_64(HLoadClass* cls,
231 HInstruction* at,
232 uint32_t dex_pc,
233 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000234 : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000235 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
236 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100237
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000238 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000239 LocationSummary* locations = at_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000240 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100241 __ Bind(GetEntryLabel());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100242
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000243 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000244
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100245 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000246 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100247 x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000248 at_,
249 dex_pc_,
250 this);
Roland Levillain888d0672015-11-23 18:53:50 +0000251 if (do_clinit_) {
252 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
253 } else {
254 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
255 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100256
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000257 Location out = locations->Out();
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000258 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000259 if (out.IsValid()) {
260 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Roland Levillain0d5a2812015-11-13 10:07:31 +0000261 x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000262 }
263
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000264 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100265 __ jmp(GetExitLabel());
266 }
267
Alexandre Rames9931f312015-06-19 14:47:01 +0100268 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }
269
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100270 private:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000271 // The class this slow path will load.
272 HLoadClass* const cls_;
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100273
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000274 // The instruction where this slow path is happening.
275 // (Might be the load class or an initialization check).
276 HInstruction* const at_;
277
278 // The dex PC of `at_`.
279 const uint32_t dex_pc_;
280
281 // Whether to initialize the class.
282 const bool do_clinit_;
283
284 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100285};
286
// Slow path for HInstanceOf and HCheckCast, calling the corresponding
// runtime type-checking entrypoint. When `is_fatal` is true the runtime call
// is expected to throw, so no live-register save/restore or exit jump is
// emitted.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // CheckCast keeps the object's class in a temp; InstanceOf reuses its
    // output location for it.
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    // For InstanceOf, the output register must not be a live register at this
    // point (it is about to be clobbered with the runtime's result).
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // InstanceOf returns its result in RAX; move it to the output location.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // True when the runtime call cannot return to compiled code (see class
  // comment).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
347
Andreas Gampe85b62f22015-09-09 13:15:38 -0700348class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700349 public:
Aart Bik42249c32016-01-07 15:33:50 -0800350 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000351 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700352
353 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000354 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700355 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100356 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000357 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700358 }
359
Alexandre Rames9931f312015-06-19 14:47:01 +0100360 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
361
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700362 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700363 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
364};
365
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100366class ArraySetSlowPathX86_64 : public SlowPathCode {
367 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000368 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100369
370 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
371 LocationSummary* locations = instruction_->GetLocations();
372 __ Bind(GetEntryLabel());
373 SaveLiveRegisters(codegen, locations);
374
375 InvokeRuntimeCallingConvention calling_convention;
376 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
377 parallel_move.AddMove(
378 locations->InAt(0),
379 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
380 Primitive::kPrimNot,
381 nullptr);
382 parallel_move.AddMove(
383 locations->InAt(1),
384 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
385 Primitive::kPrimInt,
386 nullptr);
387 parallel_move.AddMove(
388 locations->InAt(2),
389 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
390 Primitive::kPrimNot,
391 nullptr);
392 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
393
Roland Levillain0d5a2812015-11-13 10:07:31 +0000394 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100395 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000396 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100397 RestoreLiveRegisters(codegen, locations);
398 __ jmp(GetExitLabel());
399 }
400
401 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
402
403 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100404 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
405};
406
// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `obj` is the register location holding the reference to mark; when
  // `unpoison` is true the reference is heap-poisoned and is unpoisoned in
  // place before the runtime call.
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj, bool unpoison)
      : SlowPathCode(instruction), obj_(obj), unpoison_(unpoison) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    // Only these instructions are expected to need a marking slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(obj_.AsRegister<CpuRegister>());
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // Register location of the reference to mark (input and output of the
  // dedicated marking entrypoint).
  const Location obj_;
  // Whether the reference must be unpoisoned before the call.
  const bool unpoison_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
473
Roland Levillain0d5a2812015-11-13 10:07:31 +0000474// Slow path generating a read barrier for a heap reference.
475class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
476 public:
477 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
478 Location out,
479 Location ref,
480 Location obj,
481 uint32_t offset,
482 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000483 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000484 out_(out),
485 ref_(ref),
486 obj_(obj),
487 offset_(offset),
488 index_(index) {
489 DCHECK(kEmitCompilerReadBarrier);
490 // If `obj` is equal to `out` or `ref`, it means the initial
491 // object has been overwritten by (or after) the heap object
492 // reference load to be instrumented, e.g.:
493 //
494 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000495 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000496 //
497 // In that case, we have lost the information about the original
498 // object, and the emitted read barrier cannot work properly.
499 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
500 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
501}
502
503 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
504 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
505 LocationSummary* locations = instruction_->GetLocations();
506 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
507 DCHECK(locations->CanCall());
508 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100509 DCHECK(instruction_->IsInstanceFieldGet() ||
510 instruction_->IsStaticFieldGet() ||
511 instruction_->IsArrayGet() ||
512 instruction_->IsInstanceOf() ||
513 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100514 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000515 << "Unexpected instruction in read barrier for heap reference slow path: "
516 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000517
518 __ Bind(GetEntryLabel());
519 SaveLiveRegisters(codegen, locations);
520
521 // We may have to change the index's value, but as `index_` is a
522 // constant member (like other "inputs" of this slow path),
523 // introduce a copy of it, `index`.
524 Location index = index_;
525 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100526 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000527 if (instruction_->IsArrayGet()) {
528 // Compute real offset and store it in index_.
529 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
530 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
531 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
532 // We are about to change the value of `index_reg` (see the
533 // calls to art::x86_64::X86_64Assembler::shll and
534 // art::x86_64::X86_64Assembler::AddImmediate below), but it
535 // has not been saved by the previous call to
536 // art::SlowPathCode::SaveLiveRegisters, as it is a
537 // callee-save register --
538 // art::SlowPathCode::SaveLiveRegisters does not consider
539 // callee-save registers, as it has been designed with the
540 // assumption that callee-save registers are supposed to be
541 // handled by the called function. So, as a callee-save
542 // register, `index_reg` _would_ eventually be saved onto
543 // the stack, but it would be too late: we would have
544 // changed its value earlier. Therefore, we manually save
545 // it here into another freely available register,
546 // `free_reg`, chosen of course among the caller-save
547 // registers (as a callee-save `free_reg` register would
548 // exhibit the same problem).
549 //
550 // Note we could have requested a temporary register from
551 // the register allocator instead; but we prefer not to, as
552 // this is a slow path, and we know we can find a
553 // caller-save register that is available.
554 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
555 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
556 index_reg = free_reg;
557 index = Location::RegisterLocation(index_reg);
558 } else {
559 // The initial register stored in `index_` has already been
560 // saved in the call to art::SlowPathCode::SaveLiveRegisters
561 // (as it is not a callee-save register), so we can freely
562 // use it.
563 }
564 // Shifting the index value contained in `index_reg` by the
565 // scale factor (2) cannot overflow in practice, as the
566 // runtime is unable to allocate object arrays with a size
567 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
568 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
569 static_assert(
570 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
571 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
572 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
573 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100574 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
575 // intrinsics, `index_` is not shifted by a scale factor of 2
576 // (as in the case of ArrayGet), as it is actually an offset
577 // to an object field within an object.
578 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000579 DCHECK(instruction_->GetLocations()->Intrinsified());
580 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
581 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
582 << instruction_->AsInvoke()->GetIntrinsic();
583 DCHECK_EQ(offset_, 0U);
584 DCHECK(index_.IsRegister());
585 }
586 }
587
588 // We're moving two or three locations to locations that could
589 // overlap, so we need a parallel move resolver.
590 InvokeRuntimeCallingConvention calling_convention;
591 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
592 parallel_move.AddMove(ref_,
593 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
594 Primitive::kPrimNot,
595 nullptr);
596 parallel_move.AddMove(obj_,
597 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
598 Primitive::kPrimNot,
599 nullptr);
600 if (index.IsValid()) {
601 parallel_move.AddMove(index,
602 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
603 Primitive::kPrimInt,
604 nullptr);
605 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
606 } else {
607 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
608 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
609 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100610 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000611 instruction_,
612 instruction_->GetDexPc(),
613 this);
614 CheckEntrypointTypes<
615 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
616 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
617
618 RestoreLiveRegisters(codegen, locations);
619 __ jmp(GetExitLabel());
620 }
621
  // Human-readable name of this slow path, used in debug output.
  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }
625
626 private:
627 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
628 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
629 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
630 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
631 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
632 return static_cast<CpuRegister>(i);
633 }
634 }
635 // We shall never fail to find a free caller-save register, as
636 // there are more than two core caller-save registers on x86-64
637 // (meaning it is possible to find one which is different from
638 // `ref` and `obj`).
639 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
640 LOG(FATAL) << "Could not find a free caller-save register";
641 UNREACHABLE();
642 }
643
Roland Levillain0d5a2812015-11-13 10:07:31 +0000644 const Location out_;
645 const Location ref_;
646 const Location obj_;
647 const uint32_t offset_;
648 // An additional location containing an index to an array.
649 // Only used for HArrayGet and the UnsafeGetObject &
650 // UnsafeGetObjectVolatile intrinsics.
651 const Location index_;
652
653 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
654};
655
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the to-space reference; `root` holds the GC root to
  // pass to the runtime.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    // This slow path is only ever created when read barriers are compiled in.
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Calls the kQuickReadBarrierForRootSlow entrypoint with `root_` as
  // argument and moves the returned reference (in RAX) into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // Only class and string loads emit GC-root read barriers here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Move the root into the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // The runtime returns the result in RAX.
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  const Location out_;   // Destination of the read-barrier result.
  const Location root_;  // GC root passed to the runtime.

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
697
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100698#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100699// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
700#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100701
Roland Levillain4fa13f62015-07-06 18:11:54 +0100702inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700703 switch (cond) {
704 case kCondEQ: return kEqual;
705 case kCondNE: return kNotEqual;
706 case kCondLT: return kLess;
707 case kCondLE: return kLessEqual;
708 case kCondGT: return kGreater;
709 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700710 case kCondB: return kBelow;
711 case kCondBE: return kBelowEqual;
712 case kCondA: return kAbove;
713 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700714 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100715 LOG(FATAL) << "Unreachable";
716 UNREACHABLE();
717}
718
Aart Bike9f37602015-10-09 11:15:55 -0700719// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100720inline Condition X86_64FPCondition(IfCondition cond) {
721 switch (cond) {
722 case kCondEQ: return kEqual;
723 case kCondNE: return kNotEqual;
724 case kCondLT: return kBelow;
725 case kCondLE: return kBelowEqual;
726 case kCondGT: return kAbove;
727 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700728 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100729 };
730 LOG(FATAL) << "Unreachable";
731 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700732}
733
Vladimir Markodc151b22015-10-15 18:02:30 +0100734HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
735 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +0100736 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Vladimir Markodc151b22015-10-15 18:02:30 +0100737 switch (desired_dispatch_info.code_ptr_location) {
738 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
739 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
740 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
741 return HInvokeStaticOrDirect::DispatchInfo {
742 desired_dispatch_info.method_load_kind,
743 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
744 desired_dispatch_info.method_load_data,
745 0u
746 };
747 default:
748 return desired_dispatch_info;
749 }
750}
751
// Materializes the callee ArtMethod* for a static/direct invoke according
// to the invoke's method-load kind, and returns the location that holds it
// (`temp` for every kind except kRecursive, which reuses the current method).
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling itself: reuse the current method, already in a register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it as an immediate.
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      // Address patched in at link time.
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // PC-relative load from the dex cache array; offset patched at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      // Resolve the callee through the current method's dex cache.
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invoke: reload the current method from the stack.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
808
// Emits the call for a static or direct invoke: first loads the callee
// method (see GenerateCalleeMethodStaticOrDirectCall), then emits the call
// instruction dictated by the invoke's code-pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: jump back to this method's own frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
840
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod* from the embedded vtable at the invoke's vtable index, and
// calls its quick-compiled entrypoint.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
871
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000872void CodeGeneratorX86_64::RecordSimplePatch() {
873 if (GetCompilerOptions().GetIncludePatchInformation()) {
874 simple_patches_.emplace_back();
875 __ Bind(&simple_patches_.back());
876 }
877}
878
879void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
880 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
881 __ Bind(&string_patches_.back().label);
882}
883
Vladimir Markodbb7f5b2016-03-30 13:23:58 +0100884void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
885 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
886 __ Bind(&type_patches_.back().label);
887}
888
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000889Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
890 uint32_t element_offset) {
891 // Add a patch entry and return the label.
892 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
893 return &pc_relative_dex_cache_patches_.back().label;
894}
895
// Converts all patch records accumulated during code generation into
// LinkerPatch entries for the linker. Must be called on an empty vector.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve up front so the pushes below never reallocate.
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       info.target_method.dex_file,
                                                       info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                             info.target_method.dex_file,
                                                             info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                              &info.target_dex_file,
                                                              info.label.Position(),
                                                              info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                               &info.dex_file,
                                                               info.label.Position(),
                                                               info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                             &info.dex_file,
                                                             info.label.Position(),
                                                             info.type_index));
  }
}
949
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100950void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100951 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100952}
953
954void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100955 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100956}
957
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100958size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
959 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
960 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100961}
962
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100963size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
964 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
965 return kX86_64WordSize;
966}
967
968size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
969 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
970 return kX86_64WordSize;
971}
972
973size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
974 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
975 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100976}
977
// Calls the given quick runtime entrypoint on behalf of `instruction`,
// recording PC info (a stack map at `dex_pc`) when the entrypoint needs it.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
988
// Calls a runtime entrypoint for which no PC info must be recorded
// (the validation helper checks the caller's eligibility).
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
995
// Emits the actual runtime call: entrypoints are addressed at a fixed
// offset from the Thread object, reached through the GS segment register.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
999
// x86-64 does not use register pairs (64-bit values fit a single register).
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator: passes register counts and
// callee-save masks (including the fake return-address register) to the
// base class, and sets up arena-backed patch/fixup containers.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Reserve the fake return-address register so the allocator never uses it.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001034
// Per-instruction visitor that emits x86-64 code; shares the code
// generator's assembler.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1040
David Brazdil58282f42016-01-14 12:45:10 +00001041void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001042 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001043 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001044
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001045 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001046 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001047}
1048
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001052
// Maps a floating-point register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1056
// Emits the method prologue: optional stack-overflow probe, callee-save
// core register pushes, frame allocation, XMM callee-save spills, and the
// store of the current ArtMethod* at the bottom of the frame. CFI data is
// emitted alongside each stack adjustment.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Probe below the stack pointer; faults here are turned into
    // StackOverflowError by the implicit-check machinery.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated callee-save core registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the remainder of the frame in one stack adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill allocated callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Store the current method at the bottom of the frame.
  __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
          CpuRegister(kMethodRegisterArgument));
}
1100
// Emits the method epilogue: XMM callee-save restores, frame deallocation,
// core callee-save pops, and the return. CFI state is snapshotted and
// restored around the epilogue because code after the `ret` is still inside
// the frame as far as unwinding is concerned.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Reload callee-save XMM registers from their spill slots (mirrors the
    // prologue's spill layout).
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the frame below the core callee-save pushes in one addition.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves in the reverse order of the prologue's pushes
    // (prologue iterates the array downwards, here upwards).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  // Re-establish the in-frame CFI description for code emitted after this
  // return (e.g. further blocks of the same method).
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1131
// Binds the assembler label associated with `block` at the current position,
// making the block a valid jump target.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1135
// Emits a move between two arbitrary locations (register, FP register,
// 32/64-bit stack slot, or constant), choosing the instruction by the
// destination kind first and the source kind second. Stack-to-stack moves go
// through the scratch register TMP.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // XMM -> GPR transfer.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      // 32-bit slot: movl zero-extends into the 64-bit register.
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Float constants load 32 bits, double constants the full 64 bits.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // 32-bit stack destination.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 32-bit constants can be stored directly as an immediate.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack via the TMP scratch register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    // 64-bit stack destination.
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      // 64-bit immediates cannot be stored directly; helper handles it.
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack via the TMP scratch register.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1215
// Loads the 32-bit constant `value` into a register location, sign-extended
// to the full 64-bit register width.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1220
// Type-agnostic move: on x86-64 the location kinds alone determine the
// instruction, so `dst_type` is unused and this delegates to Move().
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1225
1226void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1227 if (location.IsRegister()) {
1228 locations->AddTemp(location);
1229 } else {
1230 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1231 }
1232}
1233
// Emits the control transfer for an unconditional jump (HGoto/HTryBoundary)
// to `successor`, inserting suspend checks where required.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge with a suspend check: the suspend-check emission covers
    // the transfer to `successor`, hence the early return.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // A suspend check directly preceding a goto out of the entry block is
  // emitted here, with no successor of its own.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Fall through when the successor is next in emission order; jump otherwise.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1253
// An unconditional jump needs no operand locations.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1257
// Code generation for HGoto delegates to the shared goto handler.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1261
// A try boundary needs no operand locations.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
1265
// A try boundary transfers control to its normal-flow successor like a goto;
// a transfer into the exit block needs no code at all.
void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
1272
// The exit block's instruction needs no operand locations.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
1276
// HExit generates no code: returns and throws reach it only abstractly.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1279
Mark Mendell152408f2015-12-31 12:28:50 -05001280template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001281void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001282 LabelType* true_label,
1283 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001284 if (cond->IsFPConditionTrueIfNaN()) {
1285 __ j(kUnordered, true_label);
1286 } else if (cond->IsFPConditionFalseIfNaN()) {
1287 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001288 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001289 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001290}
1291
// Emits the compare that sets EFLAGS for `condition`'s two inputs; no jumps
// or materialization happen here. Integer-like and reference types use the
// shared int/long compare helpers; FP types use ucomiss/ucomisd with the
// right-hand side taken from a register, constant pool, or stack slot.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Constant operand comes from the in-code literal pool.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Constant operand comes from the in-code literal pool.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1344
1345template<class LabelType>
1346void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1347 LabelType* true_target_in,
1348 LabelType* false_target_in) {
1349 // Generated branching requires both targets to be explicit. If either of the
1350 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1351 LabelType fallthrough_target;
1352 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1353 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1354
1355 // Generate the comparison to set the CC.
1356 GenerateCompareTest(condition);
1357
1358 // Now generate the correct jump(s).
1359 Primitive::Type type = condition->InputAt(0)->GetType();
1360 switch (type) {
1361 case Primitive::kPrimLong: {
1362 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1363 break;
1364 }
1365 case Primitive::kPrimFloat: {
1366 GenerateFPJumps(condition, true_target, false_target);
1367 break;
1368 }
1369 case Primitive::kPrimDouble: {
Mark Mendellc4701932015-04-10 13:18:51 -04001370 GenerateFPJumps(condition, true_target, false_target);
1371 break;
1372 }
1373 default:
1374 LOG(FATAL) << "Unexpected condition type " << type;
1375 }
1376
David Brazdil0debae72015-11-12 18:37:00 +00001377 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001378 __ jmp(false_target);
1379 }
David Brazdil0debae72015-11-12 18:37:00 +00001380
1381 if (fallthrough_target.IsLinked()) {
1382 __ Bind(&fallthrough_target);
1383 }
Mark Mendellc4701932015-04-10 13:18:51 -04001384}
1385
David Brazdil0debae72015-11-12 18:37:00 +00001386static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1387 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1388 // are set only strictly before `branch`. We can't use the eflags on long
1389 // conditions if they are materialized due to the complex branching.
1390 return cond->IsCondition() &&
1391 cond->GetNext() == branch &&
1392 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1393}
1394
// Emits the test-and-branch sequence for `instruction`'s condition input.
// Either target may be null, meaning that edge falls through. Handles four
// shapes of condition: constant, materialized boolean/condition with live
// EFLAGS, materialized value needing a compare-to-zero, and a non-
// materialized condition whose inputs are compared directly.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // EFLAGS still hold the materialization's result; branch on them.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    Primitive::Type type = condition->InputAt(0)->GetType();
    if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1478
// An HIf only needs an input location when its condition is a boolean value
// or a materialized condition; a non-materialized condition is consumed
// directly at the branch.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1485
1486void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001487 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1488 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1489 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1490 nullptr : codegen_->GetLabelOf(true_successor);
1491 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1492 nullptr : codegen_->GetLabelOf(false_successor);
1493 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001494}
1495
// A deoptimization calls a slow path; like HIf, it only needs an input
// location when the condition is a boolean value or materialized condition.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1504
// Branches to a deoptimization slow path when the condition holds; the
// false edge falls through into the regular code.
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1512
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001513static bool SelectCanUseCMOV(HSelect* select) {
1514 // There are no conditional move instructions for XMMs.
1515 if (Primitive::IsFloatingPointType(select->GetType())) {
1516 return false;
1517 }
1518
1519 // A FP condition doesn't generate the single CC that we need.
1520 HInstruction* condition = select->GetCondition();
1521 if (condition->IsCondition() &&
1522 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1523 return false;
1524 }
1525
1526 // We can generate a CMOV for this Select.
1527 return true;
1528}
1529
// Sets up locations for HSelect. The false value (input 0) shares the output
// location; the true value's constraint depends on whether a CMOV will be
// used and whether it is a constant (CMOV has no immediate form, so constants
// must be in a register).
void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::Any());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    if (SelectCanUseCMOV(select)) {
      if (select->InputAt(1)->IsConstant()) {
        // CMOV can't take an immediate; force the constant into a register.
        locations->SetInAt(1, Location::RequiresRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
    } else {
      locations->SetInAt(1, Location::Any());
    }
  }
  // The condition value is only needed as an input when materialized.
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}
1552
// Lowers HSelect either to a CMOV (integer-typed select with a non-FP
// condition) or to a test-and-branch around a move.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: generate the compare and use its flags.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the true value when condition fails.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1609
// Native debug info carries no operands; an empty LocationSummary suffices.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}
1613
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1617
// Emits a single one-byte nop.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1621
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001622void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001623 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001624 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001625 // Handle the long/FP comparisons made in instruction simplification.
1626 switch (cond->InputAt(0)->GetType()) {
1627 case Primitive::kPrimLong:
1628 locations->SetInAt(0, Location::RequiresRegister());
1629 locations->SetInAt(1, Location::Any());
1630 break;
1631 case Primitive::kPrimFloat:
1632 case Primitive::kPrimDouble:
1633 locations->SetInAt(0, Location::RequiresFpuRegister());
1634 locations->SetInAt(1, Location::Any());
1635 break;
1636 default:
1637 locations->SetInAt(0, Location::RequiresRegister());
1638 locations->SetInAt(1, Location::Any());
1639 break;
1640 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001641 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001642 locations->SetOut(Location::RequiresRegister());
1643 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001644}
1645
// Materializes an HCondition into its output register (0 or 1). Conditions
// emitted at their use site are skipped here. Integer/long conditions use
// compare + setcc; FP conditions need the NaN-aware jump sequence followed by
// explicit 0/1 stores.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimLong:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case Primitive::kPrimFloat: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1715
// Signed/equality comparison visitors. Each HCondition node is handled
// uniformly by HandleCondition, which builds the locations (builder side)
// and materializes the boolean result (codegen side).

void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
1763
// Unsigned comparison visitors (below/above correspond to the x86 unsigned
// condition codes). All delegate to the shared HandleCondition logic.

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
1795
// Register allocation constraints for HCompare (the three-way compare used
// by Long.compare / Float.compare etc.): first input in a register, second
// input anywhere (register, stack slot, or constant), integer result in a
// register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output may share a register with the inputs.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
1822
// Generates code for HCompare: produces -1, 0 or 1 in `out` depending on
// whether the first input is less than, equal to, or greater than the second.
// For floating point, the gt-bias flag of the instruction decides how NaN
// (unordered) comparisons are resolved.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used to branch to the `less` label after the compare below.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN operands: result is +1 with gt bias, -1 with lt bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN operands: result is +1 with gt bias, -1 with lt bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Convert the flags set above into -1 / 0 / 1.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
1891
// Constant visitors. Constants get a ConstantLocation and no code: they are
// materialized directly at their use sites.

void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
1942
// Memory barriers need no registers; the barrier kind carried by the HIR
// node selects the fence emitted by GenerateMemoryBarrier.

void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
1950
// A void return needs no locations; codegen simply tears down the frame.

void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
1958
// Pins the returned value to its ABI return register: RAX for integral and
// reference types, XMM0 for floating point.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
1982
// The value is already in its return register (enforced by the locations
// above), so the only code emitted is the frame exit. The debug-build switch
// merely asserts the register-allocation invariant.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2008
// Maps a primitive return type to its ABI return location: RAX for integral
// and reference types, XMM0 for floating point, nothing for void.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(RAX);

    case Primitive::kPrimVoid:
      return Location::NoLocation();

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}

// The ArtMethod* callee is passed in a fixed register.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2034
// Assigns the location of the next method argument following the managed
// calling convention: GP registers for integral/reference arguments, FP
// registers for float/double, spilling to stack slots once the respective
// register file is exhausted. Updates the visitor's running gp/float/stack
// indices as a side effect, so calls must be made in argument order.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      // A long consumes two dex stack slots even though it fits in one
      // 64-bit register.
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2090
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Dispatch through the runtime's resolution trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2101
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // If the call matches a recognized intrinsic, let the intrinsic builder
  // set up the locations instead of the generic invoke path.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2114
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002115static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2116 if (invoke->GetLocations()->Intrinsified()) {
2117 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2118 intrinsic.Dispatch(invoke);
2119 return true;
2120 }
2121 return false;
2122}
2123
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // Prefer the intrinsic implementation when one was set up at location time.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

// Shared location setup for all invoke kinds: applies the x86-64 dex calling
// convention to the invoke's arguments and return value.
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2143
void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Recognized intrinsics take their own location setup.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  // Dispatch through the vtable entry; temp(0) holds the scratch register.
  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2162
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method's dex method index),
  // passed to the callee in RAX.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}

void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. This is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // Load the receiver's class into `temp`.
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the byte offset of this method's slot in the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2214
// Location constraints for arithmetic negation. Integer negation is done in
// place (output shares the input register); FP negation additionally needs a
// temporary XMM register to hold the sign-bit mask.
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2236
// Code generation for negation. Integers use the hardware neg instruction;
// floating-point values are negated by XOR-ing the sign bit, which also
// gives the correct result for -0.0 and NaN payloads.
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2280
// Establishes register-allocator constraints (input/output locations) for an
// HTypeConversion node. The per-case operand kinds chosen here must stay in
// sync with what InstructionCodeGeneratorX86_64::VisitTypeConversion expects
// when it emits code for the same (input_type, result_type) pair.
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // Identity conversions are expected to have been removed before codegen.
  DCHECK_NE(result_type, input_type);

  // The Java language does not allow treating boolean as an integral type but
  // our bit representation makes it safe.

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to byte is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Any(): the codegen handles register, stack-slot and constant inputs
          // (movsx with a register or memory operand, or a folded immediate).
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to short is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          // Default (overlapping) output: the codegen writes the output
          // register (kPrimIntMax) before the input is last read, so the two
          // must not share a register.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Type conversion from long to char is a result of code transformations.
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          // Any(): cvtsi2ss accepts a memory source operand, and constant
          // inputs are materialized directly by the codegen.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
          // Boolean input is a result of code transformations.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
2478
2479void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2480 LocationSummary* locations = conversion->GetLocations();
2481 Location out = locations->Out();
2482 Location in = locations->InAt(0);
2483 Primitive::Type result_type = conversion->GetResultType();
2484 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002485 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002486 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002487 case Primitive::kPrimByte:
2488 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002489 case Primitive::kPrimLong:
2490 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002491 case Primitive::kPrimBoolean:
2492 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002493 case Primitive::kPrimShort:
2494 case Primitive::kPrimInt:
2495 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002496 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002497 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002498 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002499 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002500 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002501 Address(CpuRegister(RSP), in.GetStackIndex()));
2502 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002503 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002504 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002505 }
2506 break;
2507
2508 default:
2509 LOG(FATAL) << "Unexpected type conversion from " << input_type
2510 << " to " << result_type;
2511 }
2512 break;
2513
Roland Levillain01a8d712014-11-14 16:27:39 +00002514 case Primitive::kPrimShort:
2515 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002516 case Primitive::kPrimLong:
2517 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002518 case Primitive::kPrimBoolean:
2519 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002520 case Primitive::kPrimByte:
2521 case Primitive::kPrimInt:
2522 case Primitive::kPrimChar:
2523 // Processing a Dex `int-to-short' instruction.
2524 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002525 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002526 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002527 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002528 Address(CpuRegister(RSP), in.GetStackIndex()));
2529 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002530 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002531 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002532 }
2533 break;
2534
2535 default:
2536 LOG(FATAL) << "Unexpected type conversion from " << input_type
2537 << " to " << result_type;
2538 }
2539 break;
2540
Roland Levillain946e1432014-11-11 17:35:19 +00002541 case Primitive::kPrimInt:
2542 switch (input_type) {
2543 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002544 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002545 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002546 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002547 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002548 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002549 Address(CpuRegister(RSP), in.GetStackIndex()));
2550 } else {
2551 DCHECK(in.IsConstant());
2552 DCHECK(in.GetConstant()->IsLongConstant());
2553 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002554 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002555 }
2556 break;
2557
Roland Levillain3f8f9362014-12-02 17:45:01 +00002558 case Primitive::kPrimFloat: {
2559 // Processing a Dex `float-to-int' instruction.
2560 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2561 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002562 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002563
2564 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002565 // if input >= (float)INT_MAX goto done
2566 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002567 __ j(kAboveEqual, &done);
2568 // if input == NaN goto nan
2569 __ j(kUnordered, &nan);
2570 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002571 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002572 __ jmp(&done);
2573 __ Bind(&nan);
2574 // output = 0
2575 __ xorl(output, output);
2576 __ Bind(&done);
2577 break;
2578 }
2579
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002580 case Primitive::kPrimDouble: {
2581 // Processing a Dex `double-to-int' instruction.
2582 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2583 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002584 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002585
2586 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002587 // if input >= (double)INT_MAX goto done
2588 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002589 __ j(kAboveEqual, &done);
2590 // if input == NaN goto nan
2591 __ j(kUnordered, &nan);
2592 // output = double-to-int-truncate(input)
2593 __ cvttsd2si(output, input);
2594 __ jmp(&done);
2595 __ Bind(&nan);
2596 // output = 0
2597 __ xorl(output, output);
2598 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002599 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002600 }
Roland Levillain946e1432014-11-11 17:35:19 +00002601
2602 default:
2603 LOG(FATAL) << "Unexpected type conversion from " << input_type
2604 << " to " << result_type;
2605 }
2606 break;
2607
Roland Levillaindff1f282014-11-05 14:15:05 +00002608 case Primitive::kPrimLong:
2609 switch (input_type) {
2610 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002611 case Primitive::kPrimBoolean:
2612 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002613 case Primitive::kPrimByte:
2614 case Primitive::kPrimShort:
2615 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002616 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002617 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002618 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002619 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002620 break;
2621
Roland Levillain624279f2014-12-04 11:54:28 +00002622 case Primitive::kPrimFloat: {
2623 // Processing a Dex `float-to-long' instruction.
2624 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2625 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002626 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002627
Mark Mendell92e83bf2015-05-07 11:25:03 -04002628 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002629 // if input >= (float)LONG_MAX goto done
2630 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002631 __ j(kAboveEqual, &done);
2632 // if input == NaN goto nan
2633 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002634 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002635 __ cvttss2si(output, input, true);
2636 __ jmp(&done);
2637 __ Bind(&nan);
2638 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002639 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002640 __ Bind(&done);
2641 break;
2642 }
2643
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002644 case Primitive::kPrimDouble: {
2645 // Processing a Dex `double-to-long' instruction.
2646 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2647 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002648 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002649
Mark Mendell92e83bf2015-05-07 11:25:03 -04002650 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002651 // if input >= (double)LONG_MAX goto done
2652 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002653 __ j(kAboveEqual, &done);
2654 // if input == NaN goto nan
2655 __ j(kUnordered, &nan);
2656 // output = double-to-long-truncate(input)
2657 __ cvttsd2si(output, input, true);
2658 __ jmp(&done);
2659 __ Bind(&nan);
2660 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002661 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002662 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002663 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002664 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002665
2666 default:
2667 LOG(FATAL) << "Unexpected type conversion from " << input_type
2668 << " to " << result_type;
2669 }
2670 break;
2671
Roland Levillain981e4542014-11-14 11:47:14 +00002672 case Primitive::kPrimChar:
2673 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002674 case Primitive::kPrimLong:
2675 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002676 case Primitive::kPrimBoolean:
2677 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002678 case Primitive::kPrimByte:
2679 case Primitive::kPrimShort:
2680 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002681 // Processing a Dex `int-to-char' instruction.
2682 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002683 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002684 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002685 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002686 Address(CpuRegister(RSP), in.GetStackIndex()));
2687 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002688 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002689 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002690 }
2691 break;
2692
2693 default:
2694 LOG(FATAL) << "Unexpected type conversion from " << input_type
2695 << " to " << result_type;
2696 }
2697 break;
2698
Roland Levillaindff1f282014-11-05 14:15:05 +00002699 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002700 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002701 case Primitive::kPrimBoolean:
2702 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002703 case Primitive::kPrimByte:
2704 case Primitive::kPrimShort:
2705 case Primitive::kPrimInt:
2706 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002707 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002708 if (in.IsRegister()) {
2709 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2710 } else if (in.IsConstant()) {
2711 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2712 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002713 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002714 } else {
2715 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2716 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2717 }
Roland Levillaincff13742014-11-17 14:32:17 +00002718 break;
2719
2720 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002721 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002722 if (in.IsRegister()) {
2723 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2724 } else if (in.IsConstant()) {
2725 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2726 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002727 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002728 } else {
2729 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2730 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2731 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002732 break;
2733
Roland Levillaincff13742014-11-17 14:32:17 +00002734 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002735 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002736 if (in.IsFpuRegister()) {
2737 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2738 } else if (in.IsConstant()) {
2739 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2740 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002741 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002742 } else {
2743 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2744 Address(CpuRegister(RSP), in.GetStackIndex()));
2745 }
Roland Levillaincff13742014-11-17 14:32:17 +00002746 break;
2747
2748 default:
2749 LOG(FATAL) << "Unexpected type conversion from " << input_type
2750 << " to " << result_type;
2751 };
2752 break;
2753
Roland Levillaindff1f282014-11-05 14:15:05 +00002754 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002755 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002756 case Primitive::kPrimBoolean:
2757 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002758 case Primitive::kPrimByte:
2759 case Primitive::kPrimShort:
2760 case Primitive::kPrimInt:
2761 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002762 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002763 if (in.IsRegister()) {
2764 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2765 } else if (in.IsConstant()) {
2766 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2767 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002768 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002769 } else {
2770 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2771 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2772 }
Roland Levillaincff13742014-11-17 14:32:17 +00002773 break;
2774
2775 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002776 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002777 if (in.IsRegister()) {
2778 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2779 } else if (in.IsConstant()) {
2780 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2781 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002782 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002783 } else {
2784 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2785 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2786 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002787 break;
2788
Roland Levillaincff13742014-11-17 14:32:17 +00002789 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002790 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002791 if (in.IsFpuRegister()) {
2792 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2793 } else if (in.IsConstant()) {
2794 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2795 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002796 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002797 } else {
2798 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2799 Address(CpuRegister(RSP), in.GetStackIndex()));
2800 }
Roland Levillaincff13742014-11-17 14:32:17 +00002801 break;
2802
2803 default:
2804 LOG(FATAL) << "Unexpected type conversion from " << input_type
2805 << " to " << result_type;
2806 };
Roland Levillaindff1f282014-11-05 14:15:05 +00002807 break;
2808
2809 default:
2810 LOG(FATAL) << "Unexpected type conversion from " << input_type
2811 << " to " << result_type;
2812 }
2813}
2814
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002815void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002816 LocationSummary* locations =
2817 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002818 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002819 case Primitive::kPrimInt: {
2820 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002821 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2822 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002823 break;
2824 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002825
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002826 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002827 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002828 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002829 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002830 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002831 break;
2832 }
2833
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002834 case Primitive::kPrimDouble:
2835 case Primitive::kPrimFloat: {
2836 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002837 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002838 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002839 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002840 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002841
2842 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002843 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002844 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002845}
2846
2847void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2848 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002849 Location first = locations->InAt(0);
2850 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002851 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002852
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002853 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002854 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002855 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002856 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2857 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002858 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2859 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002860 } else {
2861 __ leal(out.AsRegister<CpuRegister>(), Address(
2862 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2863 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002864 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002865 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2866 __ addl(out.AsRegister<CpuRegister>(),
2867 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2868 } else {
2869 __ leal(out.AsRegister<CpuRegister>(), Address(
2870 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2871 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002872 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002873 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002874 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002875 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002876 break;
2877 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002878
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002879 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002880 if (second.IsRegister()) {
2881 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2882 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002883 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2884 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002885 } else {
2886 __ leaq(out.AsRegister<CpuRegister>(), Address(
2887 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2888 }
2889 } else {
2890 DCHECK(second.IsConstant());
2891 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2892 int32_t int32_value = Low32Bits(value);
2893 DCHECK_EQ(int32_value, value);
2894 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2895 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2896 } else {
2897 __ leaq(out.AsRegister<CpuRegister>(), Address(
2898 first.AsRegister<CpuRegister>(), int32_value));
2899 }
2900 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002901 break;
2902 }
2903
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002904 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002905 if (second.IsFpuRegister()) {
2906 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2907 } else if (second.IsConstant()) {
2908 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002909 codegen_->LiteralFloatAddress(
2910 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002911 } else {
2912 DCHECK(second.IsStackSlot());
2913 __ addss(first.AsFpuRegister<XmmRegister>(),
2914 Address(CpuRegister(RSP), second.GetStackIndex()));
2915 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002916 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002917 }
2918
2919 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002920 if (second.IsFpuRegister()) {
2921 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2922 } else if (second.IsConstant()) {
2923 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002924 codegen_->LiteralDoubleAddress(
2925 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002926 } else {
2927 DCHECK(second.IsDoubleStackSlot());
2928 __ addsd(first.AsFpuRegister<XmmRegister>(),
2929 Address(CpuRegister(RSP), second.GetStackIndex()));
2930 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002931 break;
2932 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002933
2934 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002935 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002936 }
2937}
2938
2939void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002940 LocationSummary* locations =
2941 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002942 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002943 case Primitive::kPrimInt: {
2944 locations->SetInAt(0, Location::RequiresRegister());
2945 locations->SetInAt(1, Location::Any());
2946 locations->SetOut(Location::SameAsFirstInput());
2947 break;
2948 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002949 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002950 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04002951 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002952 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002953 break;
2954 }
Calin Juravle11351682014-10-23 15:38:15 +01002955 case Primitive::kPrimFloat:
2956 case Primitive::kPrimDouble: {
2957 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002958 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01002959 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002960 break;
Calin Juravle11351682014-10-23 15:38:15 +01002961 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002962 default:
Calin Juravle11351682014-10-23 15:38:15 +01002963 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002964 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002965}
2966
// Emits x86-64 code for an integer or floating-point subtraction.
// The register allocator pins the output to the first input (DCHECK below),
// so every form emitted here is the two-operand "first -= second".
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Right-hand side may be a register, an int constant, or a stack slot.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        // The locations builder only allows constants that fit in 32 bits
        // (subq has no 64-bit immediate form).
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // FP constants are read from the constant area via a RIP-relative address.
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3029
Calin Juravle34bacdf2014-10-07 20:23:36 +01003030void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3031 LocationSummary* locations =
3032 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3033 switch (mul->GetResultType()) {
3034 case Primitive::kPrimInt: {
3035 locations->SetInAt(0, Location::RequiresRegister());
3036 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003037 if (mul->InputAt(1)->IsIntConstant()) {
3038 // Can use 3 operand multiply.
3039 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3040 } else {
3041 locations->SetOut(Location::SameAsFirstInput());
3042 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003043 break;
3044 }
3045 case Primitive::kPrimLong: {
3046 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003047 locations->SetInAt(1, Location::Any());
3048 if (mul->InputAt(1)->IsLongConstant() &&
3049 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003050 // Can use 3 operand multiply.
3051 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3052 } else {
3053 locations->SetOut(Location::SameAsFirstInput());
3054 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003055 break;
3056 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003057 case Primitive::kPrimFloat:
3058 case Primitive::kPrimDouble: {
3059 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003060 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003061 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003062 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003063 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003064
3065 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003066 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003067 }
3068}
3069
// Emits x86-64 code for an integer or floating-point multiplication.
// Integer constants use the three-operand imul form (out may differ from
// first); all other forms are two-operand and require out == first.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // Three-operand form: out = first * imm.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand form with a 32-bit immediate: out = first * value.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // FP multiply is always two-operand; constants come from the constant area.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3153
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003154void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3155 uint32_t stack_adjustment, bool is_float) {
3156 if (source.IsStackSlot()) {
3157 DCHECK(is_float);
3158 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3159 } else if (source.IsDoubleStackSlot()) {
3160 DCHECK(!is_float);
3161 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3162 } else {
3163 // Write the value to the temporary location on the stack and load to FP stack.
3164 if (is_float) {
3165 Location stack_temp = Location::StackSlot(temp_offset);
3166 codegen_->Move(stack_temp, source);
3167 __ flds(Address(CpuRegister(RSP), temp_offset));
3168 } else {
3169 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3170 codegen_->Move(stack_temp, source);
3171 __ fldl(Address(CpuRegister(RSP), temp_offset));
3172 }
3173 }
3174}
3175
// Emits code for a floating-point remainder (HRem on float/double) using the
// x87 FPREM instruction, since SSE has no remainder instruction. Both operands
// are pushed onto the x87 stack, FPREM is iterated until the partial remainder
// is final, and the result is moved back into the XMM output register.
// Clobbers RAX (used for the FPU status word).
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // FPREM computes ST(0) rem ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  // FPREM only performs partial argument reduction per iteration.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3228
// Emits code for an integral HDiv/HRem whose divisor is the constant 1 or -1:
// the remainder is always 0, and the quotient is the numerator (negated for
// divisor -1). No idiv instruction is needed.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt: {
      if (instruction->IsRem()) {
        // x % (+/-)1 == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          __ negl(output_register);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (instruction->IsRem()) {
        // 32-bit xor is sufficient: it zero-extends to the full 64-bit register.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
3271
// Emits code for an integral HDiv whose divisor is (+/-) a power of two.
// Implements round-toward-zero signed division as an arithmetic shift:
// negative numerators are biased by |imm| - 1 (selected with cmov) before
// shifting, and the result is negated when the divisor is negative.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (abs_imm - 1), the bias needed for negative numerators.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Non-negative numerators need no bias.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // The bias may not fit in a 32-bit displacement, so materialize it first.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3315
// Emits code for an integral HDiv/HRem by an arbitrary non-trivial constant
// using the magic-number technique (multiply by a precomputed reciprocal and
// shift) instead of idiv. Requires the numerator in RAX; the quotient ends up
// in RAX (div) and the remainder in RDX (rem), matching the locations set up
// in the corresponding LocationsBuilder visit.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // The extra temp holds the numerator across the RAX/RDX-clobbering imul.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator before the widening multiply clobbers EAX/EDX.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction terms required by the magic-number algorithm.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // Add 1 to the quotient if it is negative (round toward zero):
    // EDX += sign bit of EDX.
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in RDX.
      __ movq(rax, numerator);

      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        // 64-bit immediates must come from the constant area.
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3426
Calin Juravlebacfec32014-11-14 15:54:36 +00003427void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3428 DCHECK(instruction->IsDiv() || instruction->IsRem());
3429 Primitive::Type type = instruction->GetResultType();
3430 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3431
3432 bool is_div = instruction->IsDiv();
3433 LocationSummary* locations = instruction->GetLocations();
3434
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003435 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3436 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003437
Roland Levillain271ab9c2014-11-27 15:23:57 +00003438 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003439 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003440
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003441 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003442 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003443
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003444 if (imm == 0) {
3445 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3446 } else if (imm == 1 || imm == -1) {
3447 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003448 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003449 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003450 } else {
3451 DCHECK(imm <= -2 || imm >= 2);
3452 GenerateDivRemWithAnyConstant(instruction);
3453 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003454 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003455 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003456 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003457 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003458 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003459
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003460 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3461 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3462 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3463 // so it's safe to just use negl instead of more complex comparisons.
3464 if (type == Primitive::kPrimInt) {
3465 __ cmpl(second_reg, Immediate(-1));
3466 __ j(kEqual, slow_path->GetEntryLabel());
3467 // edx:eax <- sign-extended of eax
3468 __ cdq();
3469 // eax = quotient, edx = remainder
3470 __ idivl(second_reg);
3471 } else {
3472 __ cmpq(second_reg, Immediate(-1));
3473 __ j(kEqual, slow_path->GetEntryLabel());
3474 // rdx:rax <- sign-extended of rax
3475 __ cqo();
3476 // rax = quotient, rdx = remainder
3477 __ idivq(second_reg);
3478 }
3479 __ Bind(slow_path->GetExitLabel());
3480 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003481}
3482
Calin Juravle7c4954d2014-10-28 16:57:40 +00003483void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3484 LocationSummary* locations =
3485 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3486 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003487 case Primitive::kPrimInt:
3488 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003489 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003490 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003491 locations->SetOut(Location::SameAsFirstInput());
3492 // Intel uses edx:eax as the dividend.
3493 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003494 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3495 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3496 // output and request another temp.
3497 if (div->InputAt(1)->IsConstant()) {
3498 locations->AddTemp(Location::RequiresRegister());
3499 }
Calin Juravled0d48522014-11-04 16:40:20 +00003500 break;
3501 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003502
Calin Juravle7c4954d2014-10-28 16:57:40 +00003503 case Primitive::kPrimFloat:
3504 case Primitive::kPrimDouble: {
3505 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003506 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003507 locations->SetOut(Location::SameAsFirstInput());
3508 break;
3509 }
3510
3511 default:
3512 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3513 }
3514}
3515
// Emits x86-64 code for a division. Integral types delegate to
// GenerateDivRemIntegral; float/double use the two-operand divss/divsd with
// the output pinned to the first input.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      // FP constants are read from the constant area via a RIP-relative address.
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3564
Calin Juravlebacfec32014-11-14 15:54:36 +00003565void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003566 Primitive::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003567 LocationSummary* locations =
3568 new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003569
3570 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003571 case Primitive::kPrimInt:
3572 case Primitive::kPrimLong: {
3573 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003574 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003575 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3576 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003577 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3578 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3579 // output and request another temp.
3580 if (rem->InputAt(1)->IsConstant()) {
3581 locations->AddTemp(Location::RequiresRegister());
3582 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003583 break;
3584 }
3585
3586 case Primitive::kPrimFloat:
3587 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003588 locations->SetInAt(0, Location::Any());
3589 locations->SetInAt(1, Location::Any());
3590 locations->SetOut(Location::RequiresFpuRegister());
3591 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003592 break;
3593 }
3594
3595 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003596 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003597 }
3598}
3599
3600void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3601 Primitive::Type type = rem->GetResultType();
3602 switch (type) {
3603 case Primitive::kPrimInt:
3604 case Primitive::kPrimLong: {
3605 GenerateDivRemIntegral(rem);
3606 break;
3607 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003608 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003609 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003610 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003611 break;
3612 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003613 default:
3614 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3615 }
3616}
3617
Calin Juravled0d48522014-11-04 16:40:20 +00003618void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003619 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003620 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00003621}
3622
3623void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003624 SlowPathCode* slow_path =
Calin Juravled0d48522014-11-04 16:40:20 +00003625 new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
3626 codegen_->AddSlowPath(slow_path);
3627
3628 LocationSummary* locations = instruction->GetLocations();
3629 Location value = locations->InAt(0);
3630
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003631 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003632 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003633 case Primitive::kPrimByte:
3634 case Primitive::kPrimChar:
3635 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003636 case Primitive::kPrimInt: {
3637 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003638 __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003639 __ j(kEqual, slow_path->GetEntryLabel());
3640 } else if (value.IsStackSlot()) {
3641 __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3642 __ j(kEqual, slow_path->GetEntryLabel());
3643 } else {
3644 DCHECK(value.IsConstant()) << value;
3645 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003646 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003647 }
3648 }
3649 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003650 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003651 case Primitive::kPrimLong: {
3652 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003653 __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003654 __ j(kEqual, slow_path->GetEntryLabel());
3655 } else if (value.IsDoubleStackSlot()) {
3656 __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
3657 __ j(kEqual, slow_path->GetEntryLabel());
3658 } else {
3659 DCHECK(value.IsConstant()) << value;
3660 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003661 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003662 }
3663 }
3664 break;
3665 }
3666 default:
3667 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003668 }
Calin Juravled0d48522014-11-04 16:40:20 +00003669}
3670
Calin Juravle9aec02f2014-11-18 23:06:35 +00003671void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3672 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3673
3674 LocationSummary* locations =
3675 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3676
3677 switch (op->GetResultType()) {
3678 case Primitive::kPrimInt:
3679 case Primitive::kPrimLong: {
3680 locations->SetInAt(0, Location::RequiresRegister());
3681 // The shift count needs to be in CL.
3682 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3683 locations->SetOut(Location::SameAsFirstInput());
3684 break;
3685 }
3686 default:
3687 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3688 }
3689}
3690
3691void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
3692 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3693
3694 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003695 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003696 Location second = locations->InAt(1);
3697
3698 switch (op->GetResultType()) {
3699 case Primitive::kPrimInt: {
3700 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003701 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003702 if (op->IsShl()) {
3703 __ shll(first_reg, second_reg);
3704 } else if (op->IsShr()) {
3705 __ sarl(first_reg, second_reg);
3706 } else {
3707 __ shrl(first_reg, second_reg);
3708 }
3709 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003710 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003711 if (op->IsShl()) {
3712 __ shll(first_reg, imm);
3713 } else if (op->IsShr()) {
3714 __ sarl(first_reg, imm);
3715 } else {
3716 __ shrl(first_reg, imm);
3717 }
3718 }
3719 break;
3720 }
3721 case Primitive::kPrimLong: {
3722 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003723 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003724 if (op->IsShl()) {
3725 __ shlq(first_reg, second_reg);
3726 } else if (op->IsShr()) {
3727 __ sarq(first_reg, second_reg);
3728 } else {
3729 __ shrq(first_reg, second_reg);
3730 }
3731 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003732 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003733 if (op->IsShl()) {
3734 __ shlq(first_reg, imm);
3735 } else if (op->IsShr()) {
3736 __ sarq(first_reg, imm);
3737 } else {
3738 __ shrq(first_reg, imm);
3739 }
3740 }
3741 break;
3742 }
3743 default:
3744 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00003745 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003746 }
3747}
3748
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003749void LocationsBuilderX86_64::VisitRor(HRor* ror) {
3750 LocationSummary* locations =
3751 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
3752
3753 switch (ror->GetResultType()) {
3754 case Primitive::kPrimInt:
3755 case Primitive::kPrimLong: {
3756 locations->SetInAt(0, Location::RequiresRegister());
3757 // The shift count needs to be in CL (unless it is a constant).
3758 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
3759 locations->SetOut(Location::SameAsFirstInput());
3760 break;
3761 }
3762 default:
3763 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3764 UNREACHABLE();
3765 }
3766}
3767
3768void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
3769 LocationSummary* locations = ror->GetLocations();
3770 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
3771 Location second = locations->InAt(1);
3772
3773 switch (ror->GetResultType()) {
3774 case Primitive::kPrimInt:
3775 if (second.IsRegister()) {
3776 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3777 __ rorl(first_reg, second_reg);
3778 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003779 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003780 __ rorl(first_reg, imm);
3781 }
3782 break;
3783 case Primitive::kPrimLong:
3784 if (second.IsRegister()) {
3785 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3786 __ rorq(first_reg, second_reg);
3787 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003788 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00003789 __ rorq(first_reg, imm);
3790 }
3791 break;
3792 default:
3793 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
3794 UNREACHABLE();
3795 }
3796}
3797
Calin Juravle9aec02f2014-11-18 23:06:35 +00003798void LocationsBuilderX86_64::VisitShl(HShl* shl) {
3799 HandleShift(shl);
3800}
3801
3802void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
3803 HandleShift(shl);
3804}
3805
3806void LocationsBuilderX86_64::VisitShr(HShr* shr) {
3807 HandleShift(shr);
3808}
3809
3810void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
3811 HandleShift(shr);
3812}
3813
3814void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
3815 HandleShift(ushr);
3816}
3817
3818void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
3819 HandleShift(ushr);
3820}
3821
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003822void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003823 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003824 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01003825 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003826 if (instruction->IsStringAlloc()) {
3827 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3828 } else {
3829 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3830 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3831 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003832 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003833}
3834
3835void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01003836 // Note: if heap poisoning is enabled, the entry point takes cares
3837 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00003838 if (instruction->IsStringAlloc()) {
3839 // String is allocated through StringFactory. Call NewEmptyString entry point.
3840 CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
Andreas Gampe542451c2016-07-26 09:02:02 -07003841 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00003842 __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
3843 __ call(Address(temp, code_offset.SizeValue()));
3844 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3845 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003846 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
David Brazdil6de19382016-01-08 17:37:10 +00003847 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3848 DCHECK(!codegen_->IsLeafMethod());
3849 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003850}
3851
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003852void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
3853 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003854 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003855 InvokeRuntimeCallingConvention calling_convention;
3856 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003857 locations->SetOut(Location::RegisterLocation(RAX));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08003858 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01003859 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003860}
3861
3862void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
3863 InvokeRuntimeCallingConvention calling_convention;
Mark Mendell92e83bf2015-05-07 11:25:03 -04003864 codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
3865 instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01003866 // Note: if heap poisoning is enabled, the entry point takes cares
3867 // of poisoning the reference.
Serban Constantinescuba45db02016-07-12 22:53:02 +01003868 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003869 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003870
3871 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01003872}
3873
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003874void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003875 LocationSummary* locations =
3876 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003877 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3878 if (location.IsStackSlot()) {
3879 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3880 } else if (location.IsDoubleStackSlot()) {
3881 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3882 }
3883 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003884}
3885
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003886void InstructionCodeGeneratorX86_64::VisitParameterValue(
3887 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003888 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01003889}
3890
3891void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
3892 LocationSummary* locations =
3893 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3894 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3895}
3896
3897void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
3898 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
3899 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003900}
3901
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003902void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3903 LocationSummary* locations =
3904 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3905 locations->SetInAt(0, Location::RequiresRegister());
3906 locations->SetOut(Location::RequiresRegister());
3907}
3908
3909void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
3910 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00003911 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003912 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003913 instruction->GetIndex(), kX86_64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003914 __ movq(locations->Out().AsRegister<CpuRegister>(),
3915 Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003916 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003917 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003918 instruction->GetIndex(), kX86_64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003919 __ movq(locations->Out().AsRegister<CpuRegister>(),
3920 Address(locations->InAt(0).AsRegister<CpuRegister>(),
3921 mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01003922 __ movq(locations->Out().AsRegister<CpuRegister>(),
3923 Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003924 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00003925}
3926
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003927void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003928 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003929 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003930 locations->SetInAt(0, Location::RequiresRegister());
3931 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003932}
3933
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003934void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3935 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003936 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3937 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003938 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003939 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003940 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003941 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003942 break;
3943
3944 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003945 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003946 break;
3947
3948 default:
3949 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
3950 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003951}
3952
David Brazdil66d126e2015-04-03 16:02:44 +01003953void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
3954 LocationSummary* locations =
3955 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
3956 locations->SetInAt(0, Location::RequiresRegister());
3957 locations->SetOut(Location::SameAsFirstInput());
3958}
3959
3960void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01003961 LocationSummary* locations = bool_not->GetLocations();
3962 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3963 locations->Out().AsRegister<CpuRegister>().AsRegister());
3964 Location out = locations->Out();
3965 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
3966}
3967
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003968void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003969 LocationSummary* locations =
3970 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01003971 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003972 locations->SetInAt(i, Location::Any());
3973 }
3974 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003975}
3976
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003977void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003978 LOG(FATAL) << "Unimplemented";
3979}
3980
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003981void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00003982 /*
3983 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003984 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00003985 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
3986 */
3987 switch (kind) {
3988 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00003989 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00003990 break;
3991 }
3992 case MemBarrierKind::kAnyStore:
3993 case MemBarrierKind::kLoadAny:
3994 case MemBarrierKind::kStoreStore: {
3995 // nop
3996 break;
3997 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05003998 case MemBarrierKind::kNTStoreStore:
3999 // Non-Temporal Store/Store needs an explicit fence.
4000 MemoryFence(/* non-temporal */ true);
4001 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004002 }
4003}
4004
4005void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4006 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4007
Roland Levillain0d5a2812015-11-13 10:07:31 +00004008 bool object_field_get_with_read_barrier =
4009 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004010 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004011 new (GetGraph()->GetArena()) LocationSummary(instruction,
4012 object_field_get_with_read_barrier ?
4013 LocationSummary::kCallOnSlowPath :
4014 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004015 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004016 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004017 }
Calin Juravle52c48962014-12-16 17:02:57 +00004018 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004019 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4020 locations->SetOut(Location::RequiresFpuRegister());
4021 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004022 // The output overlaps for an object field get when read barriers
4023 // are enabled: we do not want the move to overwrite the object's
4024 // location, as we need it to emit the read barrier.
4025 locations->SetOut(
4026 Location::RequiresRegister(),
4027 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004028 }
Calin Juravle52c48962014-12-16 17:02:57 +00004029}
4030
4031void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
4032 const FieldInfo& field_info) {
4033 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4034
4035 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004036 Location base_loc = locations->InAt(0);
4037 CpuRegister base = base_loc.AsRegister<CpuRegister>();
Calin Juravle52c48962014-12-16 17:02:57 +00004038 Location out = locations->Out();
4039 bool is_volatile = field_info.IsVolatile();
4040 Primitive::Type field_type = field_info.GetFieldType();
4041 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4042
4043 switch (field_type) {
4044 case Primitive::kPrimBoolean: {
4045 __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4046 break;
4047 }
4048
4049 case Primitive::kPrimByte: {
4050 __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
4051 break;
4052 }
4053
4054 case Primitive::kPrimShort: {
4055 __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4056 break;
4057 }
4058
4059 case Primitive::kPrimChar: {
4060 __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
4061 break;
4062 }
4063
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004064 case Primitive::kPrimInt: {
Calin Juravle52c48962014-12-16 17:02:57 +00004065 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4066 break;
4067 }
4068
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004069 case Primitive::kPrimNot: {
4070 // /* HeapReference<Object> */ out = *(base + offset)
4071 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004072 // Note that a potential implicit null check is handled in this
4073 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
4074 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004075 instruction, out, base, offset, /* needs_null_check */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004076 if (is_volatile) {
4077 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4078 }
4079 } else {
4080 __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
4081 codegen_->MaybeRecordImplicitNullCheck(instruction);
4082 if (is_volatile) {
4083 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4084 }
4085 // If read barriers are enabled, emit read barriers other than
4086 // Baker's using a slow path (and also unpoison the loaded
4087 // reference, if heap poisoning is enabled).
4088 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4089 }
4090 break;
4091 }
4092
Calin Juravle52c48962014-12-16 17:02:57 +00004093 case Primitive::kPrimLong: {
4094 __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
4095 break;
4096 }
4097
4098 case Primitive::kPrimFloat: {
4099 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4100 break;
4101 }
4102
4103 case Primitive::kPrimDouble: {
4104 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
4105 break;
4106 }
4107
4108 case Primitive::kPrimVoid:
4109 LOG(FATAL) << "Unreachable type " << field_type;
4110 UNREACHABLE();
4111 }
4112
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004113 if (field_type == Primitive::kPrimNot) {
4114 // Potential implicit null checks, in the case of reference
4115 // fields, are handled in the previous switch statement.
4116 } else {
4117 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004118 }
Roland Levillain4d027112015-07-01 15:41:14 +01004119
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004120 if (is_volatile) {
4121 if (field_type == Primitive::kPrimNot) {
4122 // Memory barriers, in the case of references, are also handled
4123 // in the previous switch statement.
4124 } else {
4125 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4126 }
Roland Levillain4d027112015-07-01 15:41:14 +01004127 }
Calin Juravle52c48962014-12-16 17:02:57 +00004128}
4129
4130void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4131 const FieldInfo& field_info) {
4132 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4133
4134 LocationSummary* locations =
4135 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Roland Levillain4d027112015-07-01 15:41:14 +01004136 Primitive::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004137 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004138 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004139 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004140
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004141 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004142 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004143 if (is_volatile) {
4144 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4145 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4146 } else {
4147 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4148 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004149 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004150 if (is_volatile) {
4151 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4152 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4153 } else {
4154 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4155 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004156 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004157 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004158 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004159 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004160 locations->AddTemp(Location::RequiresRegister());
Roland Levillain4d027112015-07-01 15:41:14 +01004161 } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
4162 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004163 locations->AddTemp(Location::RequiresRegister());
4164 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004165}
4166
// Emits the store for an instance or static field set.
// Volatile stores are bracketed by the required memory barriers, reference
// stores are followed by a GC card mark, and 64-bit constant stores may be
// split by MoveInt64ToAddress (which then records the implicit null check
// itself).
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Volatile store: earlier accesses may not be reordered past it.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when a helper below has already recorded the implicit null check,
  // so it must not be recorded a second time after the switch.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temp so `value` stays usable below
          // (e.g. for the card mark).
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // A 64-bit immediate may need two 32-bit stores; the helper also
        // records the implicit null check at the right instruction.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Mark the GC card for the object holding the newly stored reference.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    // Volatile store: later accesses may not be reordered before it.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4289
4290void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4291 HandleFieldSet(instruction, instruction->GetFieldInfo());
4292}
4293
4294void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004295 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004296}
4297
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Register constraints are shared with static field loads.
  HandleFieldGet(instruction);
}
4301
4302void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004303 HandleFieldGet(instruction, instruction->GetFieldInfo());
4304}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004305
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Register constraints are shared with instance field loads.
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004309
Calin Juravle52c48962014-12-16 17:02:57 +00004310void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4311 HandleFieldGet(instruction, instruction->GetFieldInfo());
4312}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004313
Calin Juravle52c48962014-12-16 17:02:57 +00004314void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4315 HandleFieldSet(instruction, instruction->GetFieldInfo());
4316}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004317
Calin Juravle52c48962014-12-16 17:02:57 +00004318void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004319 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004320}
4321
Calin Juravlee460d1d2015-09-29 04:52:17 +01004322void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4323 HUnresolvedInstanceFieldGet* instruction) {
4324 FieldAccessCallingConventionX86_64 calling_convention;
4325 codegen_->CreateUnresolvedFieldLocationSummary(
4326 instruction, instruction->GetFieldType(), calling_convention);
4327}
4328
4329void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4330 HUnresolvedInstanceFieldGet* instruction) {
4331 FieldAccessCallingConventionX86_64 calling_convention;
4332 codegen_->GenerateUnresolvedFieldAccess(instruction,
4333 instruction->GetFieldType(),
4334 instruction->GetFieldIndex(),
4335 instruction->GetDexPc(),
4336 calling_convention);
4337}
4338
4339void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4340 HUnresolvedInstanceFieldSet* instruction) {
4341 FieldAccessCallingConventionX86_64 calling_convention;
4342 codegen_->CreateUnresolvedFieldLocationSummary(
4343 instruction, instruction->GetFieldType(), calling_convention);
4344}
4345
4346void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4347 HUnresolvedInstanceFieldSet* instruction) {
4348 FieldAccessCallingConventionX86_64 calling_convention;
4349 codegen_->GenerateUnresolvedFieldAccess(instruction,
4350 instruction->GetFieldType(),
4351 instruction->GetFieldIndex(),
4352 instruction->GetDexPc(),
4353 calling_convention);
4354}
4355
4356void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4357 HUnresolvedStaticFieldGet* instruction) {
4358 FieldAccessCallingConventionX86_64 calling_convention;
4359 codegen_->CreateUnresolvedFieldLocationSummary(
4360 instruction, instruction->GetFieldType(), calling_convention);
4361}
4362
4363void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4364 HUnresolvedStaticFieldGet* instruction) {
4365 FieldAccessCallingConventionX86_64 calling_convention;
4366 codegen_->GenerateUnresolvedFieldAccess(instruction,
4367 instruction->GetFieldType(),
4368 instruction->GetFieldIndex(),
4369 instruction->GetDexPc(),
4370 calling_convention);
4371}
4372
4373void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4374 HUnresolvedStaticFieldSet* instruction) {
4375 FieldAccessCallingConventionX86_64 calling_convention;
4376 codegen_->CreateUnresolvedFieldLocationSummary(
4377 instruction, instruction->GetFieldType(), calling_convention);
4378}
4379
4380void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4381 HUnresolvedStaticFieldSet* instruction) {
4382 FieldAccessCallingConventionX86_64 calling_convention;
4383 codegen_->GenerateUnresolvedFieldAccess(instruction,
4384 instruction->GetFieldType(),
4385 instruction->GetFieldIndex(),
4386 instruction->GetDexPc(),
4387 calling_convention);
4388}
4389
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004390void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004391 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4392 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4393 ? Location::RequiresRegister()
4394 : Location::Any();
4395 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004396}
4397
Calin Juravle2ae48182016-03-16 14:05:09 +00004398void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4399 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004400 return;
4401 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004402 LocationSummary* locations = instruction->GetLocations();
4403 Location obj = locations->InAt(0);
4404
4405 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004406 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004407}
4408
// Emits an explicit null test on the object and branches to a slow path
// when it is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // testl sets ZF iff the register is zero (i.e. the reference is null).
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // The only constant an object location can hold here is null, so the
    // check statically fails: jump unconditionally to the slow path.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
4428
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit (fault-based) or explicit (compare-based)
  // null-check strategy.
  codegen_->GenerateNullCheck(instruction);
}
4432
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004433void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004434 bool object_array_get_with_read_barrier =
4435 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004436 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004437 new (GetGraph()->GetArena()) LocationSummary(instruction,
4438 object_array_get_with_read_barrier ?
4439 LocationSummary::kCallOnSlowPath :
4440 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004441 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004442 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004443 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004444 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004445 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004446 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4447 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4448 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004449 // The output overlaps for an object array get when read barriers
4450 // are enabled: we do not want the move to overwrite the array's
4451 // location, as we need it to emit the read barrier.
4452 locations->SetOut(
4453 Location::RequiresRegister(),
4454 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004455 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004456}
4457
// Emits the load for an array element access. The element type selects the
// instruction width and sign/zero extension; reference loads additionally go
// through the read-barrier machinery when it is enabled.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending 8-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending 8-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load (char is unsigned).
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case Primitive::kPrimInt: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
4558
4559void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004560 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004561
4562 bool needs_write_barrier =
4563 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004564 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004565
Nicolas Geoffray39468442014-09-02 15:17:15 +01004566 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004567 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01004568 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00004569 LocationSummary::kCallOnSlowPath :
4570 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004571
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004572 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04004573 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4574 if (Primitive::IsFloatingPointType(value_type)) {
4575 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004576 } else {
4577 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
4578 }
4579
4580 if (needs_write_barrier) {
4581 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01004582 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004583 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004584 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004585}
4586
4587void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4588 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004589 Location array_loc = locations->InAt(0);
4590 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004591 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004592 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004593 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004594 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004595 bool needs_write_barrier =
4596 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004597 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4598 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4599 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004600
4601 switch (value_type) {
4602 case Primitive::kPrimBoolean:
4603 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004604 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004605 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004606 if (value.IsRegister()) {
4607 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004608 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004609 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004610 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004611 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004612 break;
4613 }
4614
4615 case Primitive::kPrimShort:
4616 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004617 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004618 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004619 if (value.IsRegister()) {
4620 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004621 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004622 DCHECK(value.IsConstant()) << value;
4623 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004624 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004625 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004626 break;
4627 }
4628
4629 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004630 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004631 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004632
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004633 if (!value.IsRegister()) {
4634 // Just setting null.
4635 DCHECK(instruction->InputAt(2)->IsNullConstant());
4636 DCHECK(value.IsConstant()) << value;
4637 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004638 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004639 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004640 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004641 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004642 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004643
4644 DCHECK(needs_write_barrier);
4645 CpuRegister register_value = value.AsRegister<CpuRegister>();
Roland Levillain16d9f942016-08-25 17:27:56 +01004646 // We cannot use a NearLabel for `done`, as its range may be too
4647 // short when Baker read barriers are enabled.
4648 Label done;
4649 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004650 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01004651 Location temp_loc = locations->GetTemp(0);
4652 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004653 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004654 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4655 codegen_->AddSlowPath(slow_path);
4656 if (instruction->GetValueCanBeNull()) {
4657 __ testl(register_value, register_value);
4658 __ j(kNotEqual, &not_null);
4659 __ movl(address, Immediate(0));
4660 codegen_->MaybeRecordImplicitNullCheck(instruction);
4661 __ jmp(&done);
4662 __ Bind(&not_null);
4663 }
4664
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004665 // Note that when Baker read barriers are enabled, the type
4666 // checks are performed without read barriers. This is fine,
4667 // even in the case where a class object is in the from-space
4668 // after the flip, as a comparison involving such a type would
4669 // not produce a false positive; it may of course produce a
4670 // false negative, in which case we would take the ArraySet
4671 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01004672
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004673 // /* HeapReference<Class> */ temp = array->klass_
4674 __ movl(temp, Address(array, class_offset));
4675 codegen_->MaybeRecordImplicitNullCheck(instruction);
4676 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01004677
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004678 // /* HeapReference<Class> */ temp = temp->component_type_
4679 __ movl(temp, Address(temp, component_offset));
4680 // If heap poisoning is enabled, no need to unpoison `temp`
4681 // nor the object reference in `register_value->klass`, as
4682 // we are comparing two poisoned references.
4683 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01004684
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004685 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4686 __ j(kEqual, &do_put);
4687 // If heap poisoning is enabled, the `temp` reference has
4688 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004689 __ MaybeUnpoisonHeapReference(temp);
4690
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004691 // If heap poisoning is enabled, no need to unpoison the
4692 // heap reference loaded below, as it is only used for a
4693 // comparison with null.
4694 __ cmpl(Address(temp, super_offset), Immediate(0));
4695 __ j(kNotEqual, slow_path->GetEntryLabel());
4696 __ Bind(&do_put);
4697 } else {
4698 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004699 }
4700 }
4701
4702 if (kPoisonHeapReferences) {
4703 __ movl(temp, register_value);
4704 __ PoisonHeapReference(temp);
4705 __ movl(address, temp);
4706 } else {
4707 __ movl(address, register_value);
4708 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004709 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004710 codegen_->MaybeRecordImplicitNullCheck(instruction);
4711 }
4712
4713 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4714 codegen_->MarkGCCard(
4715 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4716 __ Bind(&done);
4717
4718 if (slow_path != nullptr) {
4719 __ Bind(slow_path->GetExitLabel());
4720 }
4721
4722 break;
4723 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004724
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004725 case Primitive::kPrimInt: {
4726 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004727 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004728 if (value.IsRegister()) {
4729 __ movl(address, value.AsRegister<CpuRegister>());
4730 } else {
4731 DCHECK(value.IsConstant()) << value;
4732 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4733 __ movl(address, Immediate(v));
4734 }
4735 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004736 break;
4737 }
4738
4739 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004740 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004741 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004742 if (value.IsRegister()) {
4743 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004744 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004745 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004746 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004747 Address address_high =
4748 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04004749 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004750 }
4751 break;
4752 }
4753
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004754 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004755 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004756 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004757 if (value.IsFpuRegister()) {
4758 __ movss(address, value.AsFpuRegister<XmmRegister>());
4759 } else {
4760 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004761 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
Mark Mendellea5af682015-10-22 17:35:49 -04004762 __ movl(address, Immediate(v));
4763 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004764 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004765 break;
4766 }
4767
4768 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004769 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004770 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004771 if (value.IsFpuRegister()) {
4772 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4773 codegen_->MaybeRecordImplicitNullCheck(instruction);
4774 } else {
4775 int64_t v =
4776 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004777 Address address_high =
4778 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04004779 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4780 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004781 break;
4782 }
4783
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004784 case Primitive::kPrimVoid:
4785 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004786 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004787 }
4788}
4789
4790void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004791 LocationSummary* locations =
4792 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004793 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04004794 if (!instruction->IsEmittedAtUseSite()) {
4795 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4796 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004797}
4798
4799void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04004800 if (instruction->IsEmittedAtUseSite()) {
4801 return;
4802 }
4803
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004804 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004805 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004806 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4807 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004808 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004809 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004810}
4811
4812void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004813 RegisterSet caller_saves = RegisterSet::Empty();
4814 InvokeRuntimeCallingConvention calling_convention;
4815 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4816 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
4817 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004818 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04004819 HInstruction* length = instruction->InputAt(1);
4820 if (!length->IsEmittedAtUseSite()) {
4821 locations->SetInAt(1, Location::RegisterOrConstant(length));
4822 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004823}
4824
// Generates the array bounds check: jumps to the slow path (which throws
// ArrayIndexOutOfBoundsException) when index is not in [0, length).
// Uses an unsigned comparison so that a negative index, seen as a large
// unsigned value, also takes the slow path with a single branch.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: always jump to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned "index >= length" catches negative indices as well.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      // The folded ArrayLength was never emitted; compare against the length
      // field in memory directly, and record the implicit null check on the
      // array for that memory operand.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (index_loc.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
        __ cmpl(array_len, Immediate(value));
      } else {
        __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
      }
      codegen_->MaybeRecordImplicitNullCheck(array_length);
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // Here the comparison is length vs. index, so "length <= index" (unsigned)
    // is the out-of-bounds condition.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
4873
// Marks the GC card table entry covering `object` dirty after a reference
// store, so the concurrent/generational GC knows to re-scan it.
// `temp` and `card` are clobbered. When `value_can_be_null` is true, the
// marking is skipped entirely for a null stored value.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    // Null stores need no card mark: test and skip.
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the card table base from the thread-local storage (via %gs).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // Index into the table: card = base + (object >> kCardShift).
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Dirty the card by storing the low byte of the table base; the base is
  // chosen by the runtime so that this byte is the "dirty" value, saving an
  // extra immediate load.
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
4893
// Parallel moves are inserted by the register allocator after the locations
// building phase has run, so this visitor must never be reached.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4897
// Delegates to the parallel move resolver, which orders the moves (breaking
// cycles with swaps/scratch registers) and emits the native code.
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
4901
// A suspend check only calls the runtime on its slow path, and that slow
// path saves the live registers itself, so no caller-saves are needed here.
void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}
4907
4908void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01004909 HBasicBlock* block = instruction->GetBlock();
4910 if (block->GetLoopInformation() != nullptr) {
4911 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4912 // The back edge will generate the suspend check.
4913 return;
4914 }
4915 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4916 // The goto will generate the suspend check.
4917 return;
4918 }
4919 GenerateSuspendCheck(instruction, nullptr);
4920}
4921
// Emits a thread suspension check: tests the thread-local flags word and,
// when any flag is set, transfers to a (lazily created, cached) slow path.
// When `successor` is non-null the check sits on a loop back edge and the
// fast path jumps straight to the successor block.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path already attached to this instruction, if any, so a
  // check reached from several sites shares one slow path.
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the 16-bit thread flags in TLS (via %gs); non-zero means the thread
  // must stop (e.g. a suspend request is pending).
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Fall-through fast path; the slow path returns right after the branch.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge form: fast path jumps to the loop header, slow path otherwise.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
4949
// Returns the assembler of the enclosing code generator; the resolver emits
// its moves through it.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
4953
// Emits the native code for one resolved move (moves_[index]), dispatching
// on every supported (source kind, destination kind) pair: core register,
// FPU register, 32-bit stack slot, 64-bit stack slot, and constant.
// Memory-to-memory moves go through the reserved TMP register.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      // 32-bit spill.
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      // 64-bit spill.
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack via TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      // Stack-to-stack via TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor has a shorter encoding than mov imm32 and breaks dependencies.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    }
  }
}
5051
// Swaps a 32-bit core register with a stack slot, using TMP as the spare.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5057
// Swaps two 32-bit stack slots. Needs two temporaries (TMP plus a scratch
// core register); if the scratch had to be spilled, the push moved RSP, so
// both offsets are rebased by a word.
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5070
// Swaps two 64-bit core registers through TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5076
// Swaps a 64-bit core register with a stack slot, using TMP as the spare.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5082
// Swaps two 64-bit stack slots. Same structure as the 32-bit variant:
// TMP plus a scratch register, with offsets rebased if the scratch's
// spill push moved RSP.
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5095
// Swaps a 32-bit FP register with a stack slot: the old slot contents are
// parked in TMP (a core register) and moved back into the XMM with movd.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5101
// Swaps a 64-bit FP register with a stack slot, via TMP.
// NOTE(review): movd here presumably emits the 64-bit (REX.W) form for a
// 64-bit CpuRegister operand — confirm against the assembler's encoding.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5107
// Emits the code for one swap (moves_[index]) when the move resolver finds a
// cycle. Dispatches on the (source kind, destination kind) pair to the
// matching Exchange helper; XMM-XMM swaps are done inline through TMP.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM: stash one side in TMP, movaps the other across.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5143
5144
// Saves a scratch register on the native stack so the resolver may clobber it.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5148
5149
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5153
// Emits a class initialization check: if the class status in `class_reg` is
// below kStatusInitialized, jumps to the slow path (which runs <clinit> and
// returns to the exit label).
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5162
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005163HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5164 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005165 switch (desired_class_load_kind) {
5166 case HLoadClass::LoadKind::kReferrersClass:
5167 break;
5168 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5169 DCHECK(!GetCompilerOptions().GetCompilePic());
5170 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5171 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5172 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5173 DCHECK(GetCompilerOptions().GetCompilePic());
5174 break;
5175 case HLoadClass::LoadKind::kBootImageAddress:
5176 break;
5177 case HLoadClass::LoadKind::kDexCacheAddress:
5178 DCHECK(Runtime::Current()->UseJitCompilation());
5179 break;
5180 case HLoadClass::LoadKind::kDexCachePcRelative:
5181 DCHECK(!Runtime::Current()->UseJitCompilation());
5182 break;
5183 case HLoadClass::LoadKind::kDexCacheViaMethod:
5184 break;
5185 }
5186 return desired_class_load_kind;
5187}
5188
// Builds the location summary for HLoadClass. A class needing an access
// check becomes a plain runtime call; otherwise the call kind depends on
// whether an environment or a read barrier is needed.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  if (cls->NeedsAccessCheck()) {
    // Full runtime call: type index in the first runtime argument register,
    // result in RAX.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Location::RegisterLocation(RAX),
        /* code_generator_supports_read_barrier */ true);
    return;
  }

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    // The Baker read-barrier slow path saves what it needs itself.
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  // Only these two kinds read an input (the current method / the referrer).
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
}
5216
// Emits code that materializes the mirror::Class for `cls` into the output
// register, according to the load kind chosen at graph-building time.
// May emit a slow path for on-demand resolution and/or class initialization.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Resolve the type and verify access entirely in the runtime; no inline code.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // A read barrier is required only when the compiler emits read barriers
  // and the class is not in the boot image.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /*fixup_label*/nullptr,
          requires_read_barrier);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(!requires_read_barrier);
      // The displacement is a placeholder, patched via RecordTypePatch at link time.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!requires_read_barrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // /* GcRoot<mirror::Class> */ out = *address
      if (IsUint<32>(cls->GetAddress())) {
        // The address fits in 32 bits, so it can be used as an absolute displacement.
        Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                address,
                                /*fixup_label*/nullptr,
                                requires_read_barrier);
      } else {
        // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
        __ movq(out, Immediate(cls->GetAddress()));
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                Address(out, 0),
                                /*fixup_label*/nullptr,
                                requires_read_barrier);
      }
      // The dex cache slot may still be empty; fall through to the null check below.
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
          /*fixup_label*/nullptr,
          requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Take the slow path when the dex cache entry was null (class unresolved).
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5328
5329void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5330 LocationSummary* locations =
5331 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5332 locations->SetInAt(0, Location::RequiresRegister());
5333 if (check->HasUses()) {
5334 locations->SetOut(Location::SameAsFirstInput());
5335 }
5336}
5337
5338void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005339 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005340 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005341 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005342 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005343 GenerateClassInitializationCheck(slow_path,
5344 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005345}
5346
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005347HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5348 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005349 switch (desired_string_load_kind) {
5350 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5351 DCHECK(!GetCompilerOptions().GetCompilePic());
5352 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5353 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5354 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5355 DCHECK(GetCompilerOptions().GetCompilePic());
5356 break;
5357 case HLoadString::LoadKind::kBootImageAddress:
5358 break;
5359 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005360 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005361 break;
5362 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005363 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005364 break;
5365 case HLoadString::LoadKind::kDexCacheViaMethod:
5366 break;
5367 }
5368 return desired_string_load_kind;
5369}
5370
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005371void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005372 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
5373 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005374 : LocationSummary::kNoCall;
5375 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005376 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5377 locations->SetInAt(0, Location::RequiresRegister());
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005378 locations->SetOut(Location::RegisterLocation(RAX));
5379 } else {
5380 locations->SetOut(Location::RequiresRegister());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005381 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005382}
5383
5384void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005385 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005386 Location out_loc = locations->Out();
5387 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005388
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005389 switch (load->GetLoadKind()) {
5390 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005391 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5392 codegen_->RecordStringPatch(load);
5393 return; // No dex cache slow path.
5394 }
5395 case HLoadString::LoadKind::kBootImageAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005396 DCHECK_NE(load->GetAddress(), 0u);
5397 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5398 __ movl(out, Immediate(address)); // Zero-extended.
5399 codegen_->RecordSimplePatch();
5400 return; // No dex cache slow path.
5401 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005402 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005403 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005404 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005405
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005406 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005407 InvokeRuntimeCallingConvention calling_convention;
5408 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
5409 Immediate(load->GetStringIndex()));
5410 codegen_->InvokeRuntime(kQuickResolveString,
5411 load,
5412 load->GetDexPc());
5413 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005414}
5415
David Brazdilcb1c0552015-08-04 16:22:25 +01005416static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005417 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005418 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005419}
5420
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005421void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5422 LocationSummary* locations =
5423 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5424 locations->SetOut(Location::RequiresRegister());
5425}
5426
5427void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005428 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5429}
5430
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, no output, no runtime call — just allocate an empty summary.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5434
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store 0 (null) into the thread-local exception field via the GS segment.
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
5438
5439void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5440 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005441 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005442 InvokeRuntimeCallingConvention calling_convention;
5443 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5444}
5445
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Delegate delivery to the runtime; input 0 (the exception object) was
  // placed in the first runtime calling-convention register by the builder.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5450
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005451static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5452 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005453 !kUseBakerReadBarrier &&
5454 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005455 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5456 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5457}
5458
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005459void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005460 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005461 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005462 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005463 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005464 case TypeCheckKind::kExactCheck:
5465 case TypeCheckKind::kAbstractClassCheck:
5466 case TypeCheckKind::kClassHierarchyCheck:
5467 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005468 call_kind =
5469 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005470 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005471 break;
5472 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005473 case TypeCheckKind::kUnresolvedCheck:
5474 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005475 call_kind = LocationSummary::kCallOnSlowPath;
5476 break;
5477 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005478
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005479 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005480 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005481 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005482 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005483 locations->SetInAt(0, Location::RequiresRegister());
5484 locations->SetInAt(1, Location::Any());
5485 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5486 locations->SetOut(Location::RequiresRegister());
5487 // When read barriers are enabled, we need a temporary register for
5488 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005489 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005490 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005491 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005492}
5493
// Emits code for HInstanceOf: sets `out` to 1 if `obj` is an instance of the
// class held in `cls`, and 0 otherwise. Simple check kinds are resolved
// inline; unresolved and interface checks always go to TypeCheckSlowPathX86_64.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
      locations->GetTemp(0) :
      Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  // `zero` is the "result is 0" target; `done` skips over it. Both are bound
  // at the end only if some branch actually linked them.
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  // /* HeapReference<Class> */ out = obj->klass_
  GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset);

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        // No null check was emitted, so the result can be produced branch-free.
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      // A primitive component type means this is not an array of references.
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5680
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005681void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005682 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5683 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005684 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5685 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005686 case TypeCheckKind::kExactCheck:
5687 case TypeCheckKind::kAbstractClassCheck:
5688 case TypeCheckKind::kClassHierarchyCheck:
5689 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005690 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5691 LocationSummary::kCallOnSlowPath :
5692 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005693 break;
5694 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005695 case TypeCheckKind::kUnresolvedCheck:
5696 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005697 call_kind = LocationSummary::kCallOnSlowPath;
5698 break;
5699 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005700 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5701 locations->SetInAt(0, Location::RequiresRegister());
5702 locations->SetInAt(1, Location::Any());
5703 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5704 locations->AddTemp(Location::RequiresRegister());
5705 // When read barriers are enabled, we need an additional temporary
5706 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005707 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005708 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005709 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005710}
5711
5712void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005713 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005714 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005715 Location obj_loc = locations->InAt(0);
5716 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005717 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005718 Location temp_loc = locations->GetTemp(0);
5719 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005720 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005721 locations->GetTemp(1) :
5722 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005723 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5724 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5725 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5726 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005727
Roland Levillain0d5a2812015-11-13 10:07:31 +00005728 bool is_type_check_slow_path_fatal =
5729 (type_check_kind == TypeCheckKind::kExactCheck ||
5730 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5731 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5732 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5733 !instruction->CanThrowIntoCatchBlock();
5734 SlowPathCode* type_check_slow_path =
5735 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5736 is_type_check_slow_path_fatal);
5737 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005738
Roland Levillain0d5a2812015-11-13 10:07:31 +00005739 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005740 case TypeCheckKind::kExactCheck:
5741 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005742 NearLabel done;
5743 // Avoid null check if we know obj is not null.
5744 if (instruction->MustDoNullCheck()) {
5745 __ testl(obj, obj);
5746 __ j(kEqual, &done);
5747 }
5748
5749 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005750 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005751
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005752 if (cls.IsRegister()) {
5753 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5754 } else {
5755 DCHECK(cls.IsStackSlot()) << cls;
5756 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5757 }
5758 // Jump to slow path for throwing the exception or doing a
5759 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005760 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005761 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005762 break;
5763 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005764
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005765 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005766 NearLabel done;
5767 // Avoid null check if we know obj is not null.
5768 if (instruction->MustDoNullCheck()) {
5769 __ testl(obj, obj);
5770 __ j(kEqual, &done);
5771 }
5772
5773 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005774 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005775
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005776 // If the class is abstract, we eagerly fetch the super class of the
5777 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005778 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005779 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005780 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005781 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005782
5783 // If the class reference currently in `temp` is not null, jump
5784 // to the `compare_classes` label to compare it with the checked
5785 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005786 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005787 __ j(kNotEqual, &compare_classes);
5788 // Otherwise, jump to the slow path to throw the exception.
5789 //
5790 // But before, move back the object's class into `temp` before
5791 // going into the slow path, as it has been overwritten in the
5792 // meantime.
5793 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005794 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005795 __ jmp(type_check_slow_path->GetEntryLabel());
5796
5797 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005798 if (cls.IsRegister()) {
5799 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5800 } else {
5801 DCHECK(cls.IsStackSlot()) << cls;
5802 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5803 }
5804 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005805 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005806 break;
5807 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005808
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005809 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005810 NearLabel done;
5811 // Avoid null check if we know obj is not null.
5812 if (instruction->MustDoNullCheck()) {
5813 __ testl(obj, obj);
5814 __ j(kEqual, &done);
5815 }
5816
5817 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005818 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005819
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005820 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005821 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005822 __ Bind(&loop);
5823 if (cls.IsRegister()) {
5824 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5825 } else {
5826 DCHECK(cls.IsStackSlot()) << cls;
5827 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5828 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005829 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005830
Roland Levillain0d5a2812015-11-13 10:07:31 +00005831 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005832 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005833
5834 // If the class reference currently in `temp` is not null, jump
5835 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005836 __ testl(temp, temp);
5837 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005838 // Otherwise, jump to the slow path to throw the exception.
5839 //
5840 // But before, move back the object's class into `temp` before
5841 // going into the slow path, as it has been overwritten in the
5842 // meantime.
5843 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005844 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005845 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005846 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005847 break;
5848 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005849
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005850 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005851 // We cannot use a NearLabel here, as its range might be too
5852 // short in some cases when read barriers are enabled. This has
5853 // been observed for instance when the code emitted for this
5854 // case uses high x86-64 registers (R8-R15).
5855 Label done;
5856 // Avoid null check if we know obj is not null.
5857 if (instruction->MustDoNullCheck()) {
5858 __ testl(obj, obj);
5859 __ j(kEqual, &done);
5860 }
5861
5862 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005863 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005864
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005865 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005866 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005867 if (cls.IsRegister()) {
5868 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5869 } else {
5870 DCHECK(cls.IsStackSlot()) << cls;
5871 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5872 }
5873 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005874
5875 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005876 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005877 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005878
5879 // If the component type is not null (i.e. the object is indeed
5880 // an array), jump to label `check_non_primitive_component_type`
5881 // to further check that this component type is not a primitive
5882 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005883 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005884 __ j(kNotEqual, &check_non_primitive_component_type);
5885 // Otherwise, jump to the slow path to throw the exception.
5886 //
5887 // But before, move back the object's class into `temp` before
5888 // going into the slow path, as it has been overwritten in the
5889 // meantime.
5890 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005891 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005892 __ jmp(type_check_slow_path->GetEntryLabel());
5893
5894 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005895 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005896 __ j(kEqual, &done);
5897 // Same comment as above regarding `temp` and the slow path.
5898 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005899 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005900 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005901 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005902 break;
5903 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005904
Calin Juravle98893e12015-10-02 21:05:03 +01005905 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005906 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005907 NearLabel done;
5908 // Avoid null check if we know obj is not null.
5909 if (instruction->MustDoNullCheck()) {
5910 __ testl(obj, obj);
5911 __ j(kEqual, &done);
5912 }
5913
5914 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005915 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005916
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005917 // We always go into the type check slow path for the unresolved
5918 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005919 //
5920 // We cannot directly call the CheckCast runtime entry point
5921 // without resorting to a type checking slow path here (i.e. by
5922 // calling InvokeRuntime directly), as it would require to
5923 // assign fixed registers for the inputs of this HInstanceOf
5924 // instruction (following the runtime calling convention), which
5925 // might be cluttered by the potential first read barrier
5926 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005927 //
5928 // TODO: Introduce a new runtime entry point taking the object
5929 // to test (instead of its class) as argument, and let it deal
5930 // with the read barrier issues. This will let us refactor this
5931 // case of the `switch` code as it was previously (with a direct
5932 // call to the runtime not using a type checking slow path).
5933 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005934 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005935 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005936 break;
5937 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005938
Roland Levillain0d5a2812015-11-13 10:07:31 +00005939 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005940}
5941
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005942void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5943 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005944 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005945 InvokeRuntimeCallingConvention calling_convention;
5946 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5947}
5948
5949void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01005950 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01005951 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01005952 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005953 if (instruction->IsEnter()) {
5954 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5955 } else {
5956 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5957 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005958}
5959
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005960void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
5961void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
5962void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
5963
5964void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5965 LocationSummary* locations =
5966 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5967 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5968 || instruction->GetResultType() == Primitive::kPrimLong);
5969 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005970 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005971 locations->SetOut(Location::SameAsFirstInput());
5972}
5973
5974void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
5975 HandleBitwiseOperation(instruction);
5976}
5977
5978void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
5979 HandleBitwiseOperation(instruction);
5980}
5981
5982void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
5983 HandleBitwiseOperation(instruction);
5984}
5985
// Emits the x86-64 code for an And/Or/Xor. The output was allocated to alias
// input 0 (see the LocationsBuilder counterpart), so the operation is emitted
// in two-operand form: `first op= second`.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // 32-bit case: the second operand may be a register, a constant folded
    // into an immediate, or a stack slot used as a memory operand.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      // Any 32-bit constant fits directly in the instruction immediate.
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Stack slot: operate with a memory source operand relative to RSP.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    // 64-bit case: a constant operand only fits in an instruction immediate
    // if it is representable as a (sign-extended) int32; otherwise it is
    // loaded from an in-memory 64-bit literal via LiteralInt64Address.
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6074
// In-place reference load: /* HeapReference<Object> */ out = *(out + offset),
// emitting a read barrier when the compiler is configured for them.
// `maybe_temp` must hold a register when a non-Baker read barrier is emitted
// (see the DCHECK below); otherwise it may be empty.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                      Location out,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it as the base
      // object for the read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6104
// Reference load into a distinct register:
// /* HeapReference<Object> */ out = *(obj + offset), emitting a read barrier
// when the compiler is configured for them. Unlike the one-register variant,
// the base object `obj` is preserved, so no temporary is needed.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                       Location out,
                                                                       Location obj,
                                                                       uint32_t offset) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6130
// Loads a GC root: /* GcRoot<mirror::Object> */ root = *address. When
// `fixup_label` is non-null it is bound immediately after the load/lea
// instruction whose operand refers to `address` (presumably so that a later
// fixup can patch that instruction — confirm with callers). With
// `requires_read_barrier`, either the Baker fast path (test the thread's
// is-gc-marking flag, mark `root` in a slow path) or the generic slow-path
// read barrier is emitted.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             const Address& address,
                                                             Label* fixup_label,
                                                             bool requires_read_barrier) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (requires_read_barrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = *address;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The 32-bit movl above is only a valid root load if compressed
      // references, GC roots and int32_t all have the same size.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison */ false);
      codegen_->AddSlowPath(slow_path);

      // Take the slow path if the current thread is marking (thread-local
      // flag read through the GS segment).
      __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
                                      /* no_rip */ true),
                    Immediate(0));
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6193
6194void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6195 Location ref,
6196 CpuRegister obj,
6197 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006198 bool needs_null_check) {
6199 DCHECK(kEmitCompilerReadBarrier);
6200 DCHECK(kUseBakerReadBarrier);
6201
6202 // /* HeapReference<Object> */ ref = *(obj + offset)
6203 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006204 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006205}
6206
6207void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6208 Location ref,
6209 CpuRegister obj,
6210 uint32_t data_offset,
6211 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006212 bool needs_null_check) {
6213 DCHECK(kEmitCompilerReadBarrier);
6214 DCHECK(kUseBakerReadBarrier);
6215
Roland Levillain3d312422016-06-23 13:53:42 +01006216 static_assert(
6217 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6218 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006219 // /* HeapReference<Object> */ ref =
6220 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006221 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006222 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006223}
6224
// Generic Baker read barrier reference load: emits the gray-bit test on the
// object's lock word, then the actual load of `*src` into `ref`, then a
// conditional branch into a slow path that marks `ref` when the object is
// gray. The EFLAGS set by the testb below must be preserved by everything up
// to that branch (the movl and the no-op memory barrier do not touch them).
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above dereferences `obj`, so it doubles as the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
      instruction, ref, /* unpoison */ true);
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6298
6299void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6300 Location out,
6301 Location ref,
6302 Location obj,
6303 uint32_t offset,
6304 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006305 DCHECK(kEmitCompilerReadBarrier);
6306
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006307 // Insert a slow path based read barrier *after* the reference load.
6308 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006309 // If heap poisoning is enabled, the unpoisoning of the loaded
6310 // reference will be carried out by the runtime within the slow
6311 // path.
6312 //
6313 // Note that `ref` currently does not get unpoisoned (when heap
6314 // poisoning is enabled), which is alright as the `ref` argument is
6315 // not used by the artReadBarrierSlow entry point.
6316 //
6317 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6318 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6319 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6320 AddSlowPath(slow_path);
6321
Roland Levillain0d5a2812015-11-13 10:07:31 +00006322 __ jmp(slow_path->GetEntryLabel());
6323 __ Bind(slow_path->GetExitLabel());
6324}
6325
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006326void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6327 Location out,
6328 Location ref,
6329 Location obj,
6330 uint32_t offset,
6331 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006332 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006333 // Baker's read barriers shall be handled by the fast path
6334 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6335 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006336 // If heap poisoning is enabled, unpoisoning will be taken care of
6337 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006338 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006339 } else if (kPoisonHeapReferences) {
6340 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6341 }
6342}
6343
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006344void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6345 Location out,
6346 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006347 DCHECK(kEmitCompilerReadBarrier);
6348
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006349 // Insert a slow path based read barrier *after* the GC root load.
6350 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006351 // Note that GC roots are not affected by heap poisoning, so we do
6352 // not need to do anything special for this here.
6353 SlowPathCode* slow_path =
6354 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6355 AddSlowPath(slow_path);
6356
Roland Levillain0d5a2812015-11-13 10:07:31 +00006357 __ jmp(slow_path->GetEntryLabel());
6358 __ Bind(slow_path->GetExitLabel());
6359}
6360
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do: HBoundType instructions are expected to have been removed
  // by the "prepare for register allocation" pass, so reaching this visitor
  // indicates a compiler pipeline bug.
  LOG(FATAL) << "Unreachable";
}
6365
void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching this visitor at code-generation time is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
6370
Mark Mendellfe57faa2015-09-18 09:26:15 -04006371// Simple implementation of packed switch - generate cascaded compare/jumps.
6372void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6373 LocationSummary* locations =
6374 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6375 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006376 locations->AddTemp(Location::RequiresRegister());
6377 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006378}
6379
// Generates code for HPackedSwitch using one of two strategies:
//  - for small switches, a cascade of compare/branch pairs;
//  - otherwise, a jump table placed in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero base: a signed below-the-base check routes to the default
      // block, and equality to the base routes to the first successor.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero base an unsigned
      // below-compare works for every case: negative inputs read as large
      // unsigned values, fail all compares and fall through to the default.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps. Each iteration covers two cases
    // with a single compare: strictly-below and equal.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value. Elided when the default block is
    // the fall-through successor.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table strategy below.
  // Remove the bias, if needed, so the value can index the table from 0.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? Unsigned compare also rejects negative inputs.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6460
Aart Bikc5d47542016-01-27 17:00:35 -08006461void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6462 if (value == 0) {
6463 __ xorl(dest, dest);
6464 } else {
6465 __ movl(dest, Immediate(value));
6466 }
6467}
6468
// Materializes a 64-bit constant into `dest`, picking the shortest encoding:
// xor for zero, a zero-extending 32-bit move when the value fits in 32
// unsigned bits, and a full 64-bit immediate move otherwise.
void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
  if (value == 0) {
    // Clears upper bits too.
    __ xorl(dest, dest);
  } else if (IsUint<32>(value)) {
    // We can use a 32 bit move, as it will zero-extend and is shorter.
    __ movl(dest, Immediate(static_cast<int32_t>(value)));
  } else {
    __ movq(dest, Immediate(value));
  }
}
6480
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006481void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6482 if (value == 0) {
6483 __ xorps(dest, dest);
6484 } else {
6485 __ movss(dest, LiteralInt32Address(value));
6486 }
6487}
6488
6489void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6490 if (value == 0) {
6491 __ xorpd(dest, dest);
6492 } else {
6493 __ movsd(dest, LiteralInt64Address(value));
6494 }
6495}
6496
6497void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6498 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6499}
6500
6501void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6502 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6503}
6504
Aart Bika19616e2016-02-01 18:57:58 -08006505void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6506 if (value == 0) {
6507 __ testl(dest, dest);
6508 } else {
6509 __ cmpl(dest, Immediate(value));
6510 }
6511}
6512
6513void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6514 if (IsInt<32>(value)) {
6515 if (value == 0) {
6516 __ testq(dest, dest);
6517 } else {
6518 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6519 }
6520 } else {
6521 // Value won't fit in an int.
6522 __ cmpq(dest, LiteralInt64Address(value));
6523 }
6524}
6525
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006526void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6527 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6528 if (rhs.IsConstant()) {
6529 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
6530 Compare32BitValue(lhs_reg, value);
6531 } else if (rhs.IsStackSlot()) {
6532 __ cmpl(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6533 } else {
6534 __ cmpl(lhs_reg, rhs.AsRegister<CpuRegister>());
6535 }
6536}
6537
6538void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6539 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6540 if (rhs.IsConstant()) {
6541 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6542 Compare64BitValue(lhs_reg, value);
6543 } else if (rhs.IsDoubleStackSlot()) {
6544 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6545 } else {
6546 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6547 }
6548}
6549
6550Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6551 Location index,
6552 ScaleFactor scale,
6553 uint32_t data_offset) {
6554 return index.IsConstant() ?
6555 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6556 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6557}
6558
Mark Mendellcfa410b2015-05-25 16:02:44 -04006559void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6560 DCHECK(dest.IsDoubleStackSlot());
6561 if (IsInt<32>(value)) {
6562 // Can move directly as an int32 constant.
6563 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6564 Immediate(static_cast<int32_t>(value)));
6565 } else {
6566 Load64BitValue(CpuRegister(TMP), value);
6567 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6568 }
6569}
6570
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * The assembler invokes Process() once final code positions are known; the
 * fixup then back-patches the 32-bit RIP-relative displacement of the
 * instruction that references the constant.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Allows subclasses (e.g. jump-table fixups) to set the offset late,
  // once the constant-area layout is known.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
6598
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      // -1 is a placeholder; the real offset is installed by CreateJumpTable()
      // via SetOffset() once the table's position in the constant area is known.
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Emits the jump table into the constant area: one 32-bit signed offset per
  // switch successor, each relative to the start of the table itself.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      // All block labels must already be bound when the table is created.
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};
6635
// Finalizes code generation: emits the constant area (including any pending
// jump tables) after the generated code, then runs the common finalization.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables. This must happen before AddConstantArea()
    // so the table data is part of the emitted constant area.
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
6656
Mark Mendellf55c3e02015-03-26 21:07:46 -04006657Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6658 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6659 return Address::RIP(fixup);
6660}
6661
6662Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6663 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6664 return Address::RIP(fixup);
6665}
6666
6667Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6668 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6669 return Address::RIP(fixup);
6670}
6671
6672Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6673 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6674 return Address::RIP(fixup);
6675}
6676
// TODO: trg as memory.
// Moves the value in the ABI return register(s) for `type` into location
// `trg`. No code is emitted when `trg` is invalid (void) or already the
// return location.
void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
  if (!trg.IsValid()) {
    // An invalid target is only legal for void calls: nothing to move.
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
  if (trg.Equals(return_loc)) {
    return;
  }

  // Let the parallel move resolver take care of all of this.
  HParallelMove parallel_move(GetGraph()->GetArena());
  parallel_move.AddMove(return_loc, trg, type, nullptr);
  GetMoveResolver()->EmitNativeCode(&parallel_move);
}
6696
Mark Mendell9c86b482015-09-18 13:36:07 -04006697Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6698 // Create a fixup to be used to create and address the jump table.
6699 JumpTableRIPFixup* table_fixup =
6700 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6701
6702 // We have to populate the jump tables.
6703 fixups_to_jump_tables_.push_back(table_fixup);
6704 return Address::RIP(table_fixup);
6705}
6706
// Stores the 64-bit constant `v` to memory. If the value fits in a
// sign-extended 32-bit immediate, a single movq is used; otherwise the two
// 32-bit halves are stored separately to `addr_low` and `addr_high`.
// The implicit null check is recorded right after the first store in either
// case, since that is the access that can fault on a null `instruction` base.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a sign-extended 32-bit immediate. Do it in pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
6724
Roland Levillain4d027112015-07-01 15:41:14 +01006725#undef __
6726
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006727} // namespace x86_64
6728} // namespace art