blob: 1d87bf61982e7ce32d91afc9fa9d094acfbf4400 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010020#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000021#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010022#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010023#include "gc/accounting/card_table.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024#include "intrinsics.h"
25#include "intrinsics_x86_64.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070026#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070027#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010028#include "mirror/object_reference.h"
29#include "thread.h"
30#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010031#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "utils/x86_64/assembler_x86_64.h"
33#include "utils/x86_64/managed_register_x86_64.h"
34
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010035namespace art {
36
Roland Levillain0d5a2812015-11-13 10:07:31 +000037template<class MirrorType>
38class GcRoot;
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace x86_64 {
41
// Offset of the current ArtMethod* slot in the frame (always at the stack top).
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (first argument register of the
// calling convention).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Core and floating-point registers preserved across calls on x86-64.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition bit (0x400) of the x87 FPU status word.
// NOTE(review): presumably tested after fprem-style ops — confirm at use sites.
static constexpr int kC2ConditionMask = 0x400;
53
Roland Levillain7cbd27f2016-08-11 23:53:33 +010054// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
55#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070056#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010057
Andreas Gampe85b62f22015-09-09 13:15:38 -070058class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010059 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000060 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Alexandre Rames2ed20af2015-03-06 13:55:35 +000062 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000063 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010064 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000065 if (instruction_->CanThrowIntoCatchBlock()) {
66 // Live registers will be restored in the catch block if caught.
67 SaveLiveRegisters(codegen, instruction_->GetLocations());
68 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010069 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000070 instruction_,
71 instruction_->GetDexPc(),
72 this);
Roland Levillain888d0672015-11-23 18:53:50 +000073 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010074 }
75
Alexandre Rames8158f282015-08-07 10:26:17 +010076 bool IsFatal() const OVERRIDE { return true; }
77
Alexandre Rames9931f312015-06-19 14:47:01 +010078 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
79
Nicolas Geoffraye5038322014-07-04 09:41:32 +010080 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010081 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
82};
83
Andreas Gampe85b62f22015-09-09 13:15:38 -070084class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000085 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000086 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000087
Alexandre Rames2ed20af2015-03-06 13:55:35 +000088 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000089 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000090 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010091 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000092 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000093 }
94
Alexandre Rames8158f282015-08-07 10:26:17 +010095 bool IsFatal() const OVERRIDE { return true; }
96
Alexandre Rames9931f312015-06-19 14:47:01 +010097 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
98
Calin Juravled0d48522014-11-04 16:40:20 +000099 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000100 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
101};
102
// Slow path computing the result of a Div/Rem whose divisor is -1 (per the
// class name). On x86 the idiv instruction cannot be used for
// kMinValue / -1; this path materializes the result directly in `cpu_reg_`:
// the negated dividend for a division, zero for a remainder.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, Primitive::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == Primitive::kPrimInt) {
      if (is_div_) {
        // x / -1 == -x (kMinValue wraps back to kMinValue, as required).
        __ negl(cpu_reg_);
      } else {
        // x % -1 is always 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(Primitive::kPrimLong, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // Intentionally 32-bit: in 64-bit mode a 32-bit xor zero-extends,
        // clearing the full 64-bit register with a shorter encoding.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  // Register holding the dividend on entry and the result on exit.
  const CpuRegister cpu_reg_;
  // kPrimInt or kPrimLong (checked by the DCHECK above).
  const Primitive::Type type_;
  // True for division, false for remainder.
  const bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
136
Andreas Gampe85b62f22015-09-09 13:15:38 -0700137class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000138 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100139 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000140 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000141
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000142 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000143 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000144 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100145 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000146 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100147 if (successor_ == nullptr) {
148 __ jmp(GetReturnLabel());
149 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000150 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100151 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 }
153
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 Label* GetReturnLabel() {
155 DCHECK(successor_ == nullptr);
156 return &return_label_;
157 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000158
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100159 HBasicBlock* GetSuccessor() const {
160 return successor_;
161 }
162
Alexandre Rames9931f312015-06-19 14:47:01 +0100163 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
164
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000165 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100166 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000167 Label return_label_;
168
169 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
170};
171
// Slow path throwing an out-of-bounds exception: ThrowStringBounds for a
// String.charAt bounds check, ThrowArrayBounds otherwise. Fatal.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // The length was never materialized in a register (it is emitted at
      // its use site), so load it from the array object now.
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both possible entry points share the (index, length) signature.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
227
// Slow path resolving (and optionally initializing) a class via the
// InitializeType / InitializeStaticStorage runtime entry points, then
// moving the result into the instruction's output location.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // Pass the type index as the sole argument of the entry point.
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  at_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location.
    // (A ClinitCheck has no output, hence the IsValid() test; the entry
    // point leaves its result in RAX, the return register.)
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
286
// Slow path for HInstanceOf / HCheckCast: calls the InstanceofNonTrivial
// or CheckCast entry point. For a CheckCast known to fail the path is
// fatal and emits no exit jump.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // CheckCast keeps the loaded class in a temp; InstanceOf reuses its
    // output location for it.
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    // A fatal path never returns, so live registers need not be preserved.
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The entry point's result comes back in RAX, the return register.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // True when the type check cannot succeed (no return to compiled code).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
347
Andreas Gampe85b62f22015-09-09 13:15:38 -0700348class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700349 public:
Aart Bik42249c32016-01-07 15:33:50 -0800350 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000351 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700352
353 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000354 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700355 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100356 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000357 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700358 }
359
Alexandre Rames9931f312015-06-19 14:47:01 +0100360 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
361
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700362 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700363 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
364};
365
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100366class ArraySetSlowPathX86_64 : public SlowPathCode {
367 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000368 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100369
370 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
371 LocationSummary* locations = instruction_->GetLocations();
372 __ Bind(GetEntryLabel());
373 SaveLiveRegisters(codegen, locations);
374
375 InvokeRuntimeCallingConvention calling_convention;
376 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
377 parallel_move.AddMove(
378 locations->InAt(0),
379 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
380 Primitive::kPrimNot,
381 nullptr);
382 parallel_move.AddMove(
383 locations->InAt(1),
384 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
385 Primitive::kPrimInt,
386 nullptr);
387 parallel_move.AddMove(
388 locations->InAt(2),
389 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
390 Primitive::kPrimNot,
391 nullptr);
392 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
393
Roland Levillain0d5a2812015-11-13 10:07:31 +0000394 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100395 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000396 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100397 RestoreLiveRegisters(codegen, locations);
398 __ jmp(GetExitLabel());
399 }
400
401 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
402
403 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100404 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
405};
406
// Slow path marking an object during a read barrier. Instead of the
// standard calling convention, it calls a per-register entry point that
// takes its input and leaves its output in the register holding `obj`.
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction, Location obj, bool unpoison)
      : SlowPathCode(instruction), obj_(obj), unpoison_(unpoison) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg = obj_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg));
    // Only these instruction kinds are expected to emit this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(obj_.AsRegister<CpuRegister>());
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(reg, RSP);
    DCHECK(0 <= reg && reg < kNumberOfCpuRegisters) << reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- obj
    //   RAX <- ReadBarrierMark(RDI)
    //   obj <- RAX
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // Location of the reference to mark (used as both input and output of
  // the marking entry point).
  const Location obj_;
  // Whether the reference must be unpoisoned before the entry point call.
  const bool unpoison_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
473
Roland Levillain0d5a2812015-11-13 10:07:31 +0000474// Slow path generating a read barrier for a heap reference.
475class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
476 public:
477 ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
478 Location out,
479 Location ref,
480 Location obj,
481 uint32_t offset,
482 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000483 : SlowPathCode(instruction),
Roland Levillain0d5a2812015-11-13 10:07:31 +0000484 out_(out),
485 ref_(ref),
486 obj_(obj),
487 offset_(offset),
488 index_(index) {
489 DCHECK(kEmitCompilerReadBarrier);
490 // If `obj` is equal to `out` or `ref`, it means the initial
491 // object has been overwritten by (or after) the heap object
492 // reference load to be instrumented, e.g.:
493 //
494 // __ movl(out, Address(out, offset));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000495 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +0000496 //
497 // In that case, we have lost the information about the original
498 // object, and the emitted read barrier cannot work properly.
499 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
500 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
501}
502
503 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
504 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
505 LocationSummary* locations = instruction_->GetLocations();
506 CpuRegister reg_out = out_.AsRegister<CpuRegister>();
507 DCHECK(locations->CanCall());
508 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
Roland Levillain3d312422016-06-23 13:53:42 +0100509 DCHECK(instruction_->IsInstanceFieldGet() ||
510 instruction_->IsStaticFieldGet() ||
511 instruction_->IsArrayGet() ||
512 instruction_->IsInstanceOf() ||
513 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100514 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000515 << "Unexpected instruction in read barrier for heap reference slow path: "
516 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000517
518 __ Bind(GetEntryLabel());
519 SaveLiveRegisters(codegen, locations);
520
521 // We may have to change the index's value, but as `index_` is a
522 // constant member (like other "inputs" of this slow path),
523 // introduce a copy of it, `index`.
524 Location index = index_;
525 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100526 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain0d5a2812015-11-13 10:07:31 +0000527 if (instruction_->IsArrayGet()) {
528 // Compute real offset and store it in index_.
529 Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
530 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
531 if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
532 // We are about to change the value of `index_reg` (see the
533 // calls to art::x86_64::X86_64Assembler::shll and
534 // art::x86_64::X86_64Assembler::AddImmediate below), but it
535 // has not been saved by the previous call to
536 // art::SlowPathCode::SaveLiveRegisters, as it is a
537 // callee-save register --
538 // art::SlowPathCode::SaveLiveRegisters does not consider
539 // callee-save registers, as it has been designed with the
540 // assumption that callee-save registers are supposed to be
541 // handled by the called function. So, as a callee-save
542 // register, `index_reg` _would_ eventually be saved onto
543 // the stack, but it would be too late: we would have
544 // changed its value earlier. Therefore, we manually save
545 // it here into another freely available register,
546 // `free_reg`, chosen of course among the caller-save
547 // registers (as a callee-save `free_reg` register would
548 // exhibit the same problem).
549 //
550 // Note we could have requested a temporary register from
551 // the register allocator instead; but we prefer not to, as
552 // this is a slow path, and we know we can find a
553 // caller-save register that is available.
554 Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
555 __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
556 index_reg = free_reg;
557 index = Location::RegisterLocation(index_reg);
558 } else {
559 // The initial register stored in `index_` has already been
560 // saved in the call to art::SlowPathCode::SaveLiveRegisters
561 // (as it is not a callee-save register), so we can freely
562 // use it.
563 }
564 // Shifting the index value contained in `index_reg` by the
565 // scale factor (2) cannot overflow in practice, as the
566 // runtime is unable to allocate object arrays with a size
567 // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
568 __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
569 static_assert(
570 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
571 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
572 __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
573 } else {
Roland Levillain3d312422016-06-23 13:53:42 +0100574 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
575 // intrinsics, `index_` is not shifted by a scale factor of 2
576 // (as in the case of ArrayGet), as it is actually an offset
577 // to an object field within an object.
578 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000579 DCHECK(instruction_->GetLocations()->Intrinsified());
580 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
581 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
582 << instruction_->AsInvoke()->GetIntrinsic();
583 DCHECK_EQ(offset_, 0U);
584 DCHECK(index_.IsRegister());
585 }
586 }
587
588 // We're moving two or three locations to locations that could
589 // overlap, so we need a parallel move resolver.
590 InvokeRuntimeCallingConvention calling_convention;
591 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
592 parallel_move.AddMove(ref_,
593 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
594 Primitive::kPrimNot,
595 nullptr);
596 parallel_move.AddMove(obj_,
597 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
598 Primitive::kPrimNot,
599 nullptr);
600 if (index.IsValid()) {
601 parallel_move.AddMove(index,
602 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
603 Primitive::kPrimInt,
604 nullptr);
605 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
606 } else {
607 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
608 __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
609 }
Serban Constantinescuba45db02016-07-12 22:53:02 +0100610 x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000611 instruction_,
612 instruction_->GetDexPc(),
613 this);
614 CheckEntrypointTypes<
615 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
616 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
617
618 RestoreLiveRegisters(codegen, locations);
619 __ jmp(GetExitLabel());
620 }
621
  // Human-readable name of this slow path, used in debugging/trace output.
  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }
625
626 private:
627 CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
628 size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
629 size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
630 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
631 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
632 return static_cast<CpuRegister>(i);
633 }
634 }
635 // We shall never fail to find a free caller-save register, as
636 // there are more than two core caller-save registers on x86-64
637 // (meaning it is possible to find one which is different from
638 // `ref` and `obj`).
639 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
640 LOG(FATAL) << "Could not find a free caller-save register";
641 UNREACHABLE();
642 }
643
  // Location receiving the result of the read barrier (filled from RAX).
  const Location out_;
  // Location of the reference passed as the first runtime argument.
  const Location ref_;
  // Location of the holder object passed as the second runtime argument.
  const Location obj_;
  // Byte offset of the reference within `obj_` (third runtime argument
  // when no index is used; folded into the index for HArrayGet).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
655
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Calls the kQuickReadBarrierForRootSlow entrypoint with `root_` as
  // argument and moves the returned reference from RAX into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // Only HLoadClass and HLoadString load GC roots through this path.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in the first runtime argument register.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // Destination of the loaded root.
  const Location out_;
  // Location of the GC root to be read.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
697
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100698#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100699// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
700#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100701
Roland Levillain4fa13f62015-07-06 18:11:54 +0100702inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700703 switch (cond) {
704 case kCondEQ: return kEqual;
705 case kCondNE: return kNotEqual;
706 case kCondLT: return kLess;
707 case kCondLE: return kLessEqual;
708 case kCondGT: return kGreater;
709 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700710 case kCondB: return kBelow;
711 case kCondBE: return kBelowEqual;
712 case kCondA: return kAbove;
713 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700714 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100715 LOG(FATAL) << "Unreachable";
716 UNREACHABLE();
717}
718
Aart Bike9f37602015-10-09 11:15:55 -0700719// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100720inline Condition X86_64FPCondition(IfCondition cond) {
721 switch (cond) {
722 case kCondEQ: return kEqual;
723 case kCondNE: return kNotEqual;
724 case kCondLT: return kBelow;
725 case kCondLE: return kBelowEqual;
726 case kCondGT: return kAbove;
727 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700728 default: break; // should not happen
Roland Levillain4fa13f62015-07-06 18:11:54 +0100729 };
730 LOG(FATAL) << "Unreachable";
731 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700732}
733
Vladimir Markodc151b22015-10-15 18:02:30 +0100734HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
735 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
736 MethodReference target_method ATTRIBUTE_UNUSED) {
737 switch (desired_dispatch_info.code_ptr_location) {
738 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
739 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
740 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
741 return HInvokeStaticOrDirect::DispatchInfo {
742 desired_dispatch_info.method_load_kind,
743 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
744 desired_dispatch_info.method_load_data,
745 0u
746 };
747 default:
748 return desired_dispatch_info;
749 }
750}
751
// Materializes the callee method for a static/direct invoke according to
// its method load kind, emitting loads into `temp` where needed, and
// returns the location holding the callee (`temp` for all kinds except
// kRecursive, which reuses the invoke's special input).
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
      // temp = thread->string_init_entrypoint
      __ gs()->movq(temp.AsRegister<CpuRegister>(),
                    Address::Absolute(invoke->GetStringInitOffset(), /* no_rip */ true));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
      __ movl(temp.AsRegister<CpuRegister>(), Immediate(0));  // Placeholder.
      method_patches_.emplace_back(invoke->GetTargetMethod());
      __ Bind(&method_patches_.back().label);  // Bind the label at the end of the "movl" insn.
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // Load from the dex cache array via a RIP-relative slot patched at link time.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      // Bind a new fixup label at the end of the "movl" insn.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register method_reg;
      CpuRegister reg = temp.AsRegister<CpuRegister>();
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // Intrinsified invoke: reload the current method from the stack.
        DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg.AsRegister();
        __ movq(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
      }
      // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
      __ movq(reg,
              Address(CpuRegister(method_reg),
                      ArtMethod::DexCacheResolvedMethodsOffset(kX86_64PointerSize).SizeValue()));
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ movq(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
      break;
    }
  }
  return callee_method;
}
806
// Emits the call for a static or direct invoke: first materializes the
// callee via GenerateCalleeMethodStaticOrDirectCall, then emits the call
// instruction matching the invoke's code pointer location.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
                                                     Location temp) {
  // All registers are assumed to be correctly set up.
  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: branch to this method's own frame entry label.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
      relative_call_patches_.emplace_back(invoke->GetTargetMethod());
      Label* label = &relative_call_patches_.back().label;
      __ call(label);  // Bind to the patch label, override at link time.
      __ Bind(label);  // Bind the label at the end of the "call" insn.
      break;
    }
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }

  DCHECK(!IsLeafMethod());
}
838
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod* from the embedded vtable slot, and calls its quick entry
// point. `temp_in` is clobbered as scratch.
void CodeGeneratorX86_64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The class load above may fault on a null receiver; record it as the
  // implicit null check for this invoke.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
}
869
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000870void CodeGeneratorX86_64::RecordSimplePatch() {
871 if (GetCompilerOptions().GetIncludePatchInformation()) {
872 simple_patches_.emplace_back();
873 __ Bind(&simple_patches_.back());
874 }
875}
876
877void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) {
878 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
879 __ Bind(&string_patches_.back().label);
880}
881
Vladimir Markodbb7f5b2016-03-30 13:23:58 +0100882void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
883 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
884 __ Bind(&type_patches_.back().label);
885}
886
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000887Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
888 uint32_t element_offset) {
889 // Add a patch entry and return the label.
890 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
891 return &pc_relative_dex_cache_patches_.back().label;
892}
893
// Converts all patch records accumulated during code generation
// (method, relative-call, dex-cache-array, simple, string and type
// patches) into LinkerPatch entries appended to `linker_patches`.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      simple_patches_.size() +
      string_patches_.size() +
      type_patches_.size();
  linker_patches->reserve(size);
  // The label points to the end of the "movl" insn but the literal offset for method
  // patch needs to point to the embedded constant which occupies the last 4 bytes.
  constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
  for (const MethodPatchInfo<Label>& info : method_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                      info.target_method.dex_file,
                                                      info.target_method.dex_method_index));
  }
  for (const MethodPatchInfo<Label>& info : relative_call_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset,
                                                            info.target_method.dex_file,
                                                            info.target_method.dex_method_index));
  }
  for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset,
                                                             &info.target_dex_file,
                                                             info.label.Position(),
                                                             info.element_offset));
  }
  for (const Label& label : simple_patches_) {
    uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
  for (const StringPatchInfo<Label>& info : string_patches_) {
    // These are always PC-relative, see GetSupportedLoadStringKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset,
                                                              &info.dex_file,
                                                              info.label.Position(),
                                                              info.string_index));
  }
  for (const TypePatchInfo<Label>& info : type_patches_) {
    // These are always PC-relative, see GetSupportedLoadClassKind().
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset,
                                                            &info.dex_file,
                                                            info.label.Position(),
                                                            info.type_index));
  }
}
947
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100948void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100949 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100950}
951
952void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +0100953 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +0100954}
955
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100956size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
957 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
958 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100959}
960
Nicolas Geoffray102cbed2014-10-15 18:31:05 +0100961size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
962 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
963 return kX86_64WordSize;
964}
965
966size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
967 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
968 return kX86_64WordSize;
969}
970
971size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
972 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
973 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +0100974}
975
// Calls the runtime `entrypoint` through the thread-local entrypoint
// table; when the entrypoint requires a stack map, records PC info at
// `dex_pc` for `instruction` (with `slow_path` if non-null).
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
986
// Emits a runtime call at `entry_point_offset` without recording any PC
// info; the validation helper checks this is a legal context for it.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
993
994void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +0100995 __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
996}
997
// x86-64 has no register pairs: 64-bit values fit in a single core register.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator: configures the register file
// (including the fake return-address register in the callee-save mask)
// and the arena-backed patch record containers.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      constant_area_start_(0),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Reserve the fake return-address register so the allocator never uses it.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001032
// Instruction visitor emitting x86-64 code; caches the shared assembler
// and the owning code generator for use by the Visit* methods.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1038
// Marks the registers the register allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}
1046
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001047static dwarf::Reg DWARFReg(Register reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001048 return dwarf::Reg::X86_64Core(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001049}
David Srbecky9d8606d2015-04-12 09:35:32 +01001050
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001051static dwarf::Reg DWARFReg(FloatRegister reg) {
David Srbecky9d8606d2015-04-12 09:35:32 +01001052 return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001053}
1054
// Emits the method prologue: implicit stack-overflow probe, callee-save
// core register pushes, frame allocation, XMM callee-save spills, and
// storing the current ArtMethod* at the bottom of the frame. CFI state
// is updated alongside every stack adjustment.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
    // Implicit check: touch the address below the stack limit; a stack
    // overflow faults here and the PC info maps the fault to this method.
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated callee-save core registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame in one stack adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill allocated callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Store the current method at the bottom of the frame.
  __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
          CpuRegister(kMethodRegisterArgument));
}
1098
// Emits the method epilogue: restores spilled callee-saved XMM registers,
// releases the frame, pops callee-saved core registers and returns.
// Every stack adjustment is mirrored with a CFI directive so the unwinder
// stays correct at any point inside the epilogue.
void CodeGeneratorX86_64::GenerateFrameExit() {
  // The epilogue may be emitted in the middle of the method body; snapshot
  // the CFI state so it can be restored for the code that follows `ret`.
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Reload callee-saved floating-point registers from their spill slots.
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the frame space that is not covered by the core register pushes.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-saved core registers; iteration order is the reverse of the
    // push loop in GenerateFrameEntry.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  // Re-establish the CFI state valid for the rest of the method body.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1129
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001130void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1131 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001132}
1133
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001134void CodeGeneratorX86_64::Move(Location destination, Location source) {
1135 if (source.Equals(destination)) {
1136 return;
1137 }
1138 if (destination.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001139 CpuRegister dest = destination.AsRegister<CpuRegister>();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001140 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001141 __ movq(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001142 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001143 __ movd(dest, source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001144 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001145 __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
1146 } else if (source.IsConstant()) {
1147 HConstant* constant = source.GetConstant();
1148 if (constant->IsLongConstant()) {
1149 Load64BitValue(dest, constant->AsLongConstant()->GetValue());
1150 } else {
1151 Load32BitValue(dest, GetInt32ValueOf(constant));
1152 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001153 } else {
1154 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001155 __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001156 }
1157 } else if (destination.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001158 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001159 if (source.IsRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001160 __ movd(dest, source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001161 } else if (source.IsFpuRegister()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001162 __ movaps(dest, source.AsFpuRegister<XmmRegister>());
1163 } else if (source.IsConstant()) {
1164 HConstant* constant = source.GetConstant();
1165 int64_t value = CodeGenerator::GetInt64ValueOf(constant);
1166 if (constant->IsFloatConstant()) {
1167 Load32BitValue(dest, static_cast<int32_t>(value));
1168 } else {
1169 Load64BitValue(dest, value);
1170 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001171 } else if (source.IsStackSlot()) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001172 __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001173 } else {
1174 DCHECK(source.IsDoubleStackSlot());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001175 __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001176 }
1177 } else if (destination.IsStackSlot()) {
1178 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001179 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001180 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001181 } else if (source.IsFpuRegister()) {
1182 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001183 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001184 } else if (source.IsConstant()) {
1185 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001186 int32_t value = GetInt32ValueOf(constant);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001187 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001188 } else {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001189 DCHECK(source.IsStackSlot()) << source;
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001190 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1191 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001192 }
1193 } else {
1194 DCHECK(destination.IsDoubleStackSlot());
1195 if (source.IsRegister()) {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001196 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001197 source.AsRegister<CpuRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001198 } else if (source.IsFpuRegister()) {
1199 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00001200 source.AsFpuRegister<XmmRegister>());
Mark Mendell24f2dfa2015-01-14 19:51:45 -05001201 } else if (source.IsConstant()) {
1202 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001203 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1204 int64_t value = GetInt64ValueOf(constant);
Mark Mendellcfa410b2015-05-25 16:02:44 -04001205 Store64BitValueToStack(destination, value);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001206 } else {
1207 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001208 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
1209 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001210 }
1211 }
1212}
1213
Calin Juravle175dc732015-08-25 15:42:32 +01001214void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1215 DCHECK(location.IsRegister());
1216 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1217}
1218
Calin Juravlee460d1d2015-09-29 04:52:17 +01001219void CodeGeneratorX86_64::MoveLocation(
1220 Location dst, Location src, Primitive::Type dst_type ATTRIBUTE_UNUSED) {
1221 Move(dst, src);
1222}
1223
1224void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1225 if (location.IsRegister()) {
1226 locations->AddTemp(location);
1227 } else {
1228 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1229 }
1230}
1231
David Brazdilfc6a86a2015-06-26 10:33:45 +00001232void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001233 DCHECK(!successor->IsExitBlock());
1234
1235 HBasicBlock* block = got->GetBlock();
1236 HInstruction* previous = got->GetPrevious();
1237
1238 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001239 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001240 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1241 return;
1242 }
1243
1244 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1245 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1246 }
1247 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001248 __ jmp(codegen_->GetLabelOf(successor));
1249 }
1250}
1251
David Brazdilfc6a86a2015-06-26 10:33:45 +00001252void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
1253 got->SetLocations(nullptr);
1254}
1255
1256void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
1257 HandleGoto(got, got->GetSuccessor());
1258}
1259
1260void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1261 try_boundary->SetLocations(nullptr);
1262}
1263
1264void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1265 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1266 if (!successor->IsExitBlock()) {
1267 HandleGoto(try_boundary, successor);
1268 }
1269}
1270
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001271void LocationsBuilderX86_64::VisitExit(HExit* exit) {
1272 exit->SetLocations(nullptr);
1273}
1274
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001275void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001276}
1277
Mark Mendell152408f2015-12-31 12:28:50 -05001278template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001279void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001280 LabelType* true_label,
1281 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001282 if (cond->IsFPConditionTrueIfNaN()) {
1283 __ j(kUnordered, true_label);
1284 } else if (cond->IsFPConditionFalseIfNaN()) {
1285 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001286 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001287 __ j(X86_64FPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001288}
1289
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001290void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
Mark Mendellc4701932015-04-10 13:18:51 -04001291 LocationSummary* locations = condition->GetLocations();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001292
Mark Mendellc4701932015-04-10 13:18:51 -04001293 Location left = locations->InAt(0);
1294 Location right = locations->InAt(1);
Mark Mendellc4701932015-04-10 13:18:51 -04001295 Primitive::Type type = condition->InputAt(0)->GetType();
1296 switch (type) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001297 case Primitive::kPrimBoolean:
1298 case Primitive::kPrimByte:
1299 case Primitive::kPrimChar:
1300 case Primitive::kPrimShort:
1301 case Primitive::kPrimInt:
1302 case Primitive::kPrimNot: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001303 codegen_->GenerateIntCompare(left, right);
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001304 break;
1305 }
Mark Mendellc4701932015-04-10 13:18:51 -04001306 case Primitive::kPrimLong: {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001307 codegen_->GenerateLongCompare(left, right);
Mark Mendellc4701932015-04-10 13:18:51 -04001308 break;
1309 }
1310 case Primitive::kPrimFloat: {
1311 if (right.IsFpuRegister()) {
1312 __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1313 } else if (right.IsConstant()) {
1314 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1315 codegen_->LiteralFloatAddress(
1316 right.GetConstant()->AsFloatConstant()->GetValue()));
1317 } else {
1318 DCHECK(right.IsStackSlot());
1319 __ ucomiss(left.AsFpuRegister<XmmRegister>(),
1320 Address(CpuRegister(RSP), right.GetStackIndex()));
1321 }
Mark Mendellc4701932015-04-10 13:18:51 -04001322 break;
1323 }
1324 case Primitive::kPrimDouble: {
1325 if (right.IsFpuRegister()) {
1326 __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
1327 } else if (right.IsConstant()) {
1328 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1329 codegen_->LiteralDoubleAddress(
1330 right.GetConstant()->AsDoubleConstant()->GetValue()));
1331 } else {
1332 DCHECK(right.IsDoubleStackSlot());
1333 __ ucomisd(left.AsFpuRegister<XmmRegister>(),
1334 Address(CpuRegister(RSP), right.GetStackIndex()));
1335 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001336 break;
1337 }
1338 default:
1339 LOG(FATAL) << "Unexpected condition type " << type;
1340 }
1341}
1342
// Emits a long or floating-point comparison followed by the branch(es) to the
// given targets. Used when the comparison was folded into the HCondition.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  Primitive::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Long comparison: a single conditional jump on the integer condition.
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case Primitive::kPrimFloat: {
      // FP comparisons need extra jumps to handle unordered (NaN) results.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case Primitive::kPrimDouble: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1383
David Brazdil0debae72015-11-12 18:37:00 +00001384static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1385 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1386 // are set only strictly before `branch`. We can't use the eflags on long
1387 // conditions if they are materialized due to the complex branching.
1388 return cond->IsCondition() &&
1389 cond->GetNext() == branch &&
1390 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1391}
1392
Mark Mendell152408f2015-12-31 12:28:50 -05001393template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001394void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001395 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001396 LabelType* true_target,
1397 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001398 HInstruction* cond = instruction->InputAt(condition_input_index);
1399
1400 if (true_target == nullptr && false_target == nullptr) {
1401 // Nothing to do. The code always falls through.
1402 return;
1403 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001404 // Constant condition, statically compared against "true" (integer value 1).
1405 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001406 if (true_target != nullptr) {
1407 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001408 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001409 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001410 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001411 if (false_target != nullptr) {
1412 __ jmp(false_target);
1413 }
1414 }
1415 return;
1416 }
1417
1418 // The following code generates these patterns:
1419 // (1) true_target == nullptr && false_target != nullptr
1420 // - opposite condition true => branch to false_target
1421 // (2) true_target != nullptr && false_target == nullptr
1422 // - condition true => branch to true_target
1423 // (3) true_target != nullptr && false_target != nullptr
1424 // - condition true => branch to true_target
1425 // - branch to false_target
1426 if (IsBooleanValueOrMaterializedCondition(cond)) {
1427 if (AreEflagsSetFrom(cond, instruction)) {
1428 if (true_target == nullptr) {
1429 __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
1430 } else {
1431 __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
1432 }
1433 } else {
1434 // Materialized condition, compare against 0.
1435 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1436 if (lhs.IsRegister()) {
1437 __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
1438 } else {
1439 __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
1440 }
1441 if (true_target == nullptr) {
1442 __ j(kEqual, false_target);
1443 } else {
1444 __ j(kNotEqual, true_target);
1445 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001446 }
1447 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001448 // Condition has not been materialized, use its inputs as the
1449 // comparison and its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001450 HCondition* condition = cond->AsCondition();
Mark Mendellc4701932015-04-10 13:18:51 -04001451
David Brazdil0debae72015-11-12 18:37:00 +00001452 // If this is a long or FP comparison that has been folded into
1453 // the HCondition, generate the comparison directly.
1454 Primitive::Type type = condition->InputAt(0)->GetType();
1455 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1456 GenerateCompareTestAndBranch(condition, true_target, false_target);
1457 return;
1458 }
1459
1460 Location lhs = condition->GetLocations()->InAt(0);
1461 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001462 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001463 if (true_target == nullptr) {
1464 __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
1465 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001466 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001467 }
Dave Allison20dfc792014-06-16 20:44:29 -07001468 }
David Brazdil0debae72015-11-12 18:37:00 +00001469
1470 // If neither branch falls through (case 3), the conditional branch to `true_target`
1471 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1472 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001473 __ jmp(false_target);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001474 }
1475}
1476
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001477void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001478 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1479 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001480 locations->SetInAt(0, Location::Any());
1481 }
1482}
1483
1484void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001485 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1486 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1487 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1488 nullptr : codegen_->GetLabelOf(true_successor);
1489 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1490 nullptr : codegen_->GetLabelOf(false_successor);
1491 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001492}
1493
// HDeoptimize calls into the runtime on a slow path; no registers need to be
// treated as caller-save for it.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}

// Branch to the deoptimization slow path when the condition holds; otherwise
// fall through to the regular code.
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target */ nullptr);
}
1510
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001511static bool SelectCanUseCMOV(HSelect* select) {
1512 // There are no conditional move instructions for XMMs.
1513 if (Primitive::IsFloatingPointType(select->GetType())) {
1514 return false;
1515 }
1516
1517 // A FP condition doesn't generate the single CC that we need.
1518 HInstruction* condition = select->GetCondition();
1519 if (condition->IsCondition() &&
1520 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType())) {
1521 return false;
1522 }
1523
1524 // We can generate a CMOV for this Select.
1525 return true;
1526}
1527
David Brazdil74eb1b22015-12-14 11:44:01 +00001528void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
1529 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
1530 if (Primitive::IsFloatingPointType(select->GetType())) {
1531 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001532 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001533 } else {
1534 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001535 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001536 if (select->InputAt(1)->IsConstant()) {
1537 locations->SetInAt(1, Location::RequiresRegister());
1538 } else {
1539 locations->SetInAt(1, Location::Any());
1540 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001541 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001542 locations->SetInAt(1, Location::Any());
1543 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001544 }
1545 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1546 locations->SetInAt(2, Location::RequiresRegister());
1547 }
1548 locations->SetOut(Location::SameAsFirstInput());
1549}
1550
// Emits an HSelect either as a CMOV (integer values, integer condition) or as
// a test-and-branch around a move.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Non-materialized condition: emit the comparison here.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = Primitive::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the true value. The output already
    // holds the false value (same location as input 0).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1607
David Srbecky0cf44932015-12-09 14:09:59 +00001608void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1609 new (GetGraph()->GetArena()) LocationSummary(info);
1610}
1611
David Srbeckyd28f4a02016-03-14 17:14:24 +00001612void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
1613 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001614}
1615
1616void CodeGeneratorX86_64::GenerateNop() {
1617 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001618}
1619
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001620void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001621 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001622 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001623 // Handle the long/FP comparisons made in instruction simplification.
1624 switch (cond->InputAt(0)->GetType()) {
1625 case Primitive::kPrimLong:
1626 locations->SetInAt(0, Location::RequiresRegister());
1627 locations->SetInAt(1, Location::Any());
1628 break;
1629 case Primitive::kPrimFloat:
1630 case Primitive::kPrimDouble:
1631 locations->SetInAt(0, Location::RequiresFpuRegister());
1632 locations->SetInAt(1, Location::Any());
1633 break;
1634 default:
1635 locations->SetInAt(0, Location::RequiresRegister());
1636 locations->SetInAt(1, Location::Any());
1637 break;
1638 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001639 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001640 locations->SetOut(Location::RequiresRegister());
1641 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001642}
1643
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001644void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001645 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001646 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001647 }
Mark Mendellc4701932015-04-10 13:18:51 -04001648
1649 LocationSummary* locations = cond->GetLocations();
1650 Location lhs = locations->InAt(0);
1651 Location rhs = locations->InAt(1);
1652 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001653 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001654
1655 switch (cond->InputAt(0)->GetType()) {
1656 default:
1657 // Integer case.
1658
1659 // Clear output register: setcc only sets the low byte.
1660 __ xorl(reg, reg);
1661
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001662 codegen_->GenerateIntCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001663 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001664 return;
1665 case Primitive::kPrimLong:
1666 // Clear output register: setcc only sets the low byte.
1667 __ xorl(reg, reg);
1668
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001669 codegen_->GenerateLongCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001670 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001671 return;
1672 case Primitive::kPrimFloat: {
1673 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1674 if (rhs.IsConstant()) {
1675 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
1676 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
1677 } else if (rhs.IsStackSlot()) {
1678 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1679 } else {
1680 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1681 }
1682 GenerateFPJumps(cond, &true_label, &false_label);
1683 break;
1684 }
1685 case Primitive::kPrimDouble: {
1686 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
1687 if (rhs.IsConstant()) {
1688 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
1689 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
1690 } else if (rhs.IsDoubleStackSlot()) {
1691 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
1692 } else {
1693 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
1694 }
1695 GenerateFPJumps(cond, &true_label, &false_label);
1696 break;
1697 }
1698 }
1699
1700 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001701 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001702
Roland Levillain4fa13f62015-07-06 18:11:54 +01001703 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001704 __ Bind(&false_label);
1705 __ xorl(reg, reg);
1706 __ jmp(&done_label);
1707
Roland Levillain4fa13f62015-07-06 18:11:54 +01001708 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001709 __ Bind(&true_label);
1710 __ movl(reg, Immediate(1));
1711 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001712}
1713
1714void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001715 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001716}
1717
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  // Code emission for all HCondition flavors is shared in HandleCondition.
  HandleCondition(comp);
}
1721
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  // Delegates to the common condition locations builder.
  HandleCondition(comp);
}
1725
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  // Delegates to the common condition code generator.
  HandleCondition(comp);
}
1729
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  // Signed less-than: locations come from the shared condition handler.
  HandleCondition(comp);
}
1733
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  // Signed less-than: code comes from the shared condition handler.
  HandleCondition(comp);
}
1737
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Signed less-or-equal: locations come from the shared condition handler.
  HandleCondition(comp);
}
1741
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Signed less-or-equal: code comes from the shared condition handler.
  HandleCondition(comp);
}
1745
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // Signed greater-than: locations come from the shared condition handler.
  HandleCondition(comp);
}
1749
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // Signed greater-than: code comes from the shared condition handler.
  HandleCondition(comp);
}
1753
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Signed greater-or-equal: locations come from the shared condition handler.
  HandleCondition(comp);
}
1757
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Signed greater-or-equal: code comes from the shared condition handler.
  HandleCondition(comp);
}
1761
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  // Unsigned less-than: locations come from the shared condition handler.
  HandleCondition(comp);
}
1765
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  // Unsigned less-than: code comes from the shared condition handler.
  HandleCondition(comp);
}
1769
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned less-or-equal: locations come from the shared condition handler.
  HandleCondition(comp);
}
1773
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned less-or-equal: code comes from the shared condition handler.
  HandleCondition(comp);
}
1777
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  // Unsigned greater-than: locations come from the shared condition handler.
  HandleCondition(comp);
}
1781
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  // Unsigned greater-than: code comes from the shared condition handler.
  HandleCondition(comp);
}
1785
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned greater-or-equal: locations come from the shared condition handler.
  HandleCondition(comp);
}
1789
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned greater-or-equal: code comes from the shared condition handler.
  HandleCondition(comp);
}
1793
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  // Register constraints for the three-way compare (result is -1, 0 or 1).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The right-hand side may be a register, a constant or a stack slot.
      locations->SetInAt(1, Location::Any());
      // The output can share a register with the inputs: it is written only
      // after both inputs have been consumed by the compare.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      // The (integer) result register must not alias the FP inputs.
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
1820
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  // Materializes the three-way comparison into `out`: -1 (less), 0 (equal)
  // or 1 (greater), with NaN handling governed by the compare's bias.
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  // Condition used below to branch to the "less" outcome. FP compares
  // override it with kBelow because ucomiss/ucomisd report "less" via CF.
  Condition less_cond = kLess;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case Primitive::kPrimLong: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: the bias decides which extreme is returned.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case Primitive::kPrimDouble: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN) result: the bias decides which extreme is returned.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // The flags from the compare above are still live here: `movl` with an
  // immediate does not modify EFLAGS.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
1889
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001890void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001891 LocationSummary* locations =
1892 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001893 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001894}
1895
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
1899
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001900void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
1901 LocationSummary* locations =
1902 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1903 locations->SetOut(Location::ConstantLocation(constant));
1904}
1905
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
1909
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001910void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001911 LocationSummary* locations =
1912 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001913 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001914}
1915
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
1919
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001920void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
1921 LocationSummary* locations =
1922 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1923 locations->SetOut(Location::ConstantLocation(constant));
1924}
1925
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
1929
1930void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
1931 LocationSummary* locations =
1932 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1933 locations->SetOut(Location::ConstantLocation(constant));
1934}
1935
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
1940
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A barrier has no operands and no result, so no location summary is needed.
  memory_barrier->SetLocations(nullptr);
}
1944
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the fence (if any) matching the barrier kind requested by the HIR.
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
1948
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  // No value to place anywhere: a void return needs no location summary.
  ret->SetLocations(nullptr);
}
1952
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and emit the `ret`.
  codegen_->GenerateFrameExit();
}
1956
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  // Pin the returned value to the ABI return register: RAX for integral and
  // reference types, XMM0 for floating-point types.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
1980
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  // The value is already in the ABI return register (enforced by
  // LocationsBuilderX86_64::VisitReturn); in debug builds, verify that the
  // register allocator honored the constraint, then emit the frame exit.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2006
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002007Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(Primitive::Type type) const {
2008 switch (type) {
2009 case Primitive::kPrimBoolean:
2010 case Primitive::kPrimByte:
2011 case Primitive::kPrimChar:
2012 case Primitive::kPrimShort:
2013 case Primitive::kPrimInt:
2014 case Primitive::kPrimNot:
2015 case Primitive::kPrimLong:
2016 return Location::RegisterLocation(RAX);
2017
2018 case Primitive::kPrimVoid:
2019 return Location::NoLocation();
2020
2021 case Primitive::kPrimDouble:
2022 case Primitive::kPrimFloat:
2023 return Location::FpuRegisterLocation(XMM0);
2024 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002025
2026 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002027}
2028
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  // The callee ArtMethod* always travels in the dedicated method register.
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2032
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(Primitive::Type type) {
  // Returns the location of the next outgoing argument of `type`, advancing
  // the visitor's GP (gp_index_) / FP (float_index_) register counters and the
  // stack counter (stack_index_). Note that stack_index_ is bumped
  // unconditionally — by one slot for 32-bit values, two for 64-bit — so that
  // stack offsets stay consistent even for arguments that land in registers.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // On x86-64 a long fits in a single GP register, so only one
        // register index is consumed.
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // NOTE(review): gp_index_ advances by 2 on the stack path —
        // presumably mirroring the two-slot Dex convention; confirm intended.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimDouble: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location::NoLocation();
}
2088
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2095
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Resolution failed at compile time; call into the runtime trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2099
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // Intrinsics install their own (specialized) location summary.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2112
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002113static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2114 if (invoke->GetLocations()->Intrinsified()) {
2115 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2116 intrinsic.Dispatch(invoke);
2117 return true;
2118 }
2119 return false;
2120}
2121
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // Intrinsified invokes emit inline code instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2136
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  // Build the standard call location summary from the x86-64 dex calling
  // convention (argument registers/slots, return register, temps).
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2141
void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Intrinsics install their own (specialized) location summary.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2150
void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Intrinsified invokes emit inline code instead of a virtual dispatch.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2160
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: RAX carries the dex method index for the
  // interface-conflict trampoline (see the code generator below).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2166
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Interface dispatch: load the receiver's class, follow its IMT pointer,
  // index the IMT by the invoke's IMT slot, and call the resulting ArtMethod.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. This is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Byte offset of the invoke's slot within the IMT.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2212
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  // Negation is done in place (output shares the input register); FP types
  // additionally need a scratch FP register for the sign-bit mask.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2234
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  // Integral negation uses neg{l,q}; floating-point negation flips the sign
  // bit with an XOR against a mask loaded from the literal pool.
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case Primitive::kPrimFloat: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2278
Roland Levillaindff1f282014-11-05 14:15:05 +00002279void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2280 LocationSummary* locations =
2281 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2282 Primitive::Type result_type = conversion->GetResultType();
2283 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002284 DCHECK_NE(result_type, input_type);
David Brazdil46e2a392015-03-16 17:31:52 +00002285
David Brazdilb2bd1c52015-03-25 11:17:37 +00002286 // The Java language does not allow treating boolean as an integral type but
2287 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002288
Roland Levillaindff1f282014-11-05 14:15:05 +00002289 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002290 case Primitive::kPrimByte:
2291 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002292 case Primitive::kPrimLong:
2293 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002294 case Primitive::kPrimBoolean:
2295 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002296 case Primitive::kPrimShort:
2297 case Primitive::kPrimInt:
2298 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002299 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002300 locations->SetInAt(0, Location::Any());
2301 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2302 break;
2303
2304 default:
2305 LOG(FATAL) << "Unexpected type conversion from " << input_type
2306 << " to " << result_type;
2307 }
2308 break;
2309
Roland Levillain01a8d712014-11-14 16:27:39 +00002310 case Primitive::kPrimShort:
2311 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002312 case Primitive::kPrimLong:
2313 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002314 case Primitive::kPrimBoolean:
2315 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002316 case Primitive::kPrimByte:
2317 case Primitive::kPrimInt:
2318 case Primitive::kPrimChar:
2319 // Processing a Dex `int-to-short' instruction.
2320 locations->SetInAt(0, Location::Any());
2321 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2322 break;
2323
2324 default:
2325 LOG(FATAL) << "Unexpected type conversion from " << input_type
2326 << " to " << result_type;
2327 }
2328 break;
2329
Roland Levillain946e1432014-11-11 17:35:19 +00002330 case Primitive::kPrimInt:
2331 switch (input_type) {
2332 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002333 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002334 locations->SetInAt(0, Location::Any());
2335 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2336 break;
2337
2338 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002339 // Processing a Dex `float-to-int' instruction.
2340 locations->SetInAt(0, Location::RequiresFpuRegister());
2341 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002342 break;
2343
Roland Levillain946e1432014-11-11 17:35:19 +00002344 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002345 // Processing a Dex `double-to-int' instruction.
2346 locations->SetInAt(0, Location::RequiresFpuRegister());
2347 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002348 break;
2349
2350 default:
2351 LOG(FATAL) << "Unexpected type conversion from " << input_type
2352 << " to " << result_type;
2353 }
2354 break;
2355
Roland Levillaindff1f282014-11-05 14:15:05 +00002356 case Primitive::kPrimLong:
2357 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002358 case Primitive::kPrimBoolean:
2359 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002360 case Primitive::kPrimByte:
2361 case Primitive::kPrimShort:
2362 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002363 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002364 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002365 // TODO: We would benefit from a (to-be-implemented)
2366 // Location::RegisterOrStackSlot requirement for this input.
2367 locations->SetInAt(0, Location::RequiresRegister());
2368 locations->SetOut(Location::RequiresRegister());
2369 break;
2370
2371 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002372 // Processing a Dex `float-to-long' instruction.
2373 locations->SetInAt(0, Location::RequiresFpuRegister());
2374 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002375 break;
2376
Roland Levillaindff1f282014-11-05 14:15:05 +00002377 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002378 // Processing a Dex `double-to-long' instruction.
2379 locations->SetInAt(0, Location::RequiresFpuRegister());
2380 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002381 break;
2382
2383 default:
2384 LOG(FATAL) << "Unexpected type conversion from " << input_type
2385 << " to " << result_type;
2386 }
2387 break;
2388
Roland Levillain981e4542014-11-14 11:47:14 +00002389 case Primitive::kPrimChar:
2390 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002391 case Primitive::kPrimLong:
2392 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002393 case Primitive::kPrimBoolean:
2394 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002395 case Primitive::kPrimByte:
2396 case Primitive::kPrimShort:
2397 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002398 // Processing a Dex `int-to-char' instruction.
2399 locations->SetInAt(0, Location::Any());
2400 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2401 break;
2402
2403 default:
2404 LOG(FATAL) << "Unexpected type conversion from " << input_type
2405 << " to " << result_type;
2406 }
2407 break;
2408
Roland Levillaindff1f282014-11-05 14:15:05 +00002409 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002410 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002411 case Primitive::kPrimBoolean:
2412 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002413 case Primitive::kPrimByte:
2414 case Primitive::kPrimShort:
2415 case Primitive::kPrimInt:
2416 case Primitive::kPrimChar:
2417 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002418 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002419 locations->SetOut(Location::RequiresFpuRegister());
2420 break;
2421
2422 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002423 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002424 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002425 locations->SetOut(Location::RequiresFpuRegister());
2426 break;
2427
Roland Levillaincff13742014-11-17 14:32:17 +00002428 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002429 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002430 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002431 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002432 break;
2433
2434 default:
2435 LOG(FATAL) << "Unexpected type conversion from " << input_type
2436 << " to " << result_type;
2437 };
2438 break;
2439
Roland Levillaindff1f282014-11-05 14:15:05 +00002440 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002441 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002442 case Primitive::kPrimBoolean:
2443 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002444 case Primitive::kPrimByte:
2445 case Primitive::kPrimShort:
2446 case Primitive::kPrimInt:
2447 case Primitive::kPrimChar:
2448 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002449 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002450 locations->SetOut(Location::RequiresFpuRegister());
2451 break;
2452
2453 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002454 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002455 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002456 locations->SetOut(Location::RequiresFpuRegister());
2457 break;
2458
Roland Levillaincff13742014-11-17 14:32:17 +00002459 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002460 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002461 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002462 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002463 break;
2464
2465 default:
2466 LOG(FATAL) << "Unexpected type conversion from " << input_type
2467 << " to " << result_type;
2468 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002469 break;
2470
2471 default:
2472 LOG(FATAL) << "Unexpected type conversion from " << input_type
2473 << " to " << result_type;
2474 }
2475}
2476
2477void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2478 LocationSummary* locations = conversion->GetLocations();
2479 Location out = locations->Out();
2480 Location in = locations->InAt(0);
2481 Primitive::Type result_type = conversion->GetResultType();
2482 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002483 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002484 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002485 case Primitive::kPrimByte:
2486 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002487 case Primitive::kPrimLong:
2488 // Type conversion from long to byte is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002489 case Primitive::kPrimBoolean:
2490 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002491 case Primitive::kPrimShort:
2492 case Primitive::kPrimInt:
2493 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002494 // Processing a Dex `int-to-byte' instruction.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002495 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002496 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002497 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002498 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002499 Address(CpuRegister(RSP), in.GetStackIndex()));
2500 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002501 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002502 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002503 }
2504 break;
2505
2506 default:
2507 LOG(FATAL) << "Unexpected type conversion from " << input_type
2508 << " to " << result_type;
2509 }
2510 break;
2511
Roland Levillain01a8d712014-11-14 16:27:39 +00002512 case Primitive::kPrimShort:
2513 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002514 case Primitive::kPrimLong:
2515 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002516 case Primitive::kPrimBoolean:
2517 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002518 case Primitive::kPrimByte:
2519 case Primitive::kPrimInt:
2520 case Primitive::kPrimChar:
2521 // Processing a Dex `int-to-short' instruction.
2522 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002523 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002524 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002525 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002526 Address(CpuRegister(RSP), in.GetStackIndex()));
2527 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002528 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002529 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002530 }
2531 break;
2532
2533 default:
2534 LOG(FATAL) << "Unexpected type conversion from " << input_type
2535 << " to " << result_type;
2536 }
2537 break;
2538
Roland Levillain946e1432014-11-11 17:35:19 +00002539 case Primitive::kPrimInt:
2540 switch (input_type) {
2541 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002542 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002543 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002544 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002545 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002546 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002547 Address(CpuRegister(RSP), in.GetStackIndex()));
2548 } else {
2549 DCHECK(in.IsConstant());
2550 DCHECK(in.GetConstant()->IsLongConstant());
2551 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002552 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002553 }
2554 break;
2555
Roland Levillain3f8f9362014-12-02 17:45:01 +00002556 case Primitive::kPrimFloat: {
2557 // Processing a Dex `float-to-int' instruction.
2558 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2559 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002560 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002561
2562 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002563 // if input >= (float)INT_MAX goto done
2564 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002565 __ j(kAboveEqual, &done);
2566 // if input == NaN goto nan
2567 __ j(kUnordered, &nan);
2568 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002569 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002570 __ jmp(&done);
2571 __ Bind(&nan);
2572 // output = 0
2573 __ xorl(output, output);
2574 __ Bind(&done);
2575 break;
2576 }
2577
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002578 case Primitive::kPrimDouble: {
2579 // Processing a Dex `double-to-int' instruction.
2580 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2581 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002582 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002583
2584 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002585 // if input >= (double)INT_MAX goto done
2586 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002587 __ j(kAboveEqual, &done);
2588 // if input == NaN goto nan
2589 __ j(kUnordered, &nan);
2590 // output = double-to-int-truncate(input)
2591 __ cvttsd2si(output, input);
2592 __ jmp(&done);
2593 __ Bind(&nan);
2594 // output = 0
2595 __ xorl(output, output);
2596 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002597 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002598 }
Roland Levillain946e1432014-11-11 17:35:19 +00002599
2600 default:
2601 LOG(FATAL) << "Unexpected type conversion from " << input_type
2602 << " to " << result_type;
2603 }
2604 break;
2605
Roland Levillaindff1f282014-11-05 14:15:05 +00002606 case Primitive::kPrimLong:
2607 switch (input_type) {
2608 DCHECK(out.IsRegister());
David Brazdil46e2a392015-03-16 17:31:52 +00002609 case Primitive::kPrimBoolean:
2610 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002611 case Primitive::kPrimByte:
2612 case Primitive::kPrimShort:
2613 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002614 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002615 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002616 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002617 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002618 break;
2619
Roland Levillain624279f2014-12-04 11:54:28 +00002620 case Primitive::kPrimFloat: {
2621 // Processing a Dex `float-to-long' instruction.
2622 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2623 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002624 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002625
Mark Mendell92e83bf2015-05-07 11:25:03 -04002626 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002627 // if input >= (float)LONG_MAX goto done
2628 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002629 __ j(kAboveEqual, &done);
2630 // if input == NaN goto nan
2631 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002632 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002633 __ cvttss2si(output, input, true);
2634 __ jmp(&done);
2635 __ Bind(&nan);
2636 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002637 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002638 __ Bind(&done);
2639 break;
2640 }
2641
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002642 case Primitive::kPrimDouble: {
2643 // Processing a Dex `double-to-long' instruction.
2644 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2645 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002646 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002647
Mark Mendell92e83bf2015-05-07 11:25:03 -04002648 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002649 // if input >= (double)LONG_MAX goto done
2650 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002651 __ j(kAboveEqual, &done);
2652 // if input == NaN goto nan
2653 __ j(kUnordered, &nan);
2654 // output = double-to-long-truncate(input)
2655 __ cvttsd2si(output, input, true);
2656 __ jmp(&done);
2657 __ Bind(&nan);
2658 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002659 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002660 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002661 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002662 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002663
2664 default:
2665 LOG(FATAL) << "Unexpected type conversion from " << input_type
2666 << " to " << result_type;
2667 }
2668 break;
2669
Roland Levillain981e4542014-11-14 11:47:14 +00002670 case Primitive::kPrimChar:
2671 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002672 case Primitive::kPrimLong:
2673 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002674 case Primitive::kPrimBoolean:
2675 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002676 case Primitive::kPrimByte:
2677 case Primitive::kPrimShort:
2678 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002679 // Processing a Dex `int-to-char' instruction.
2680 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002681 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002682 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002683 __ movzxw(out.AsRegister<CpuRegister>(),
Roland Levillain981e4542014-11-14 11:47:14 +00002684 Address(CpuRegister(RSP), in.GetStackIndex()));
2685 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002686 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002687 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain981e4542014-11-14 11:47:14 +00002688 }
2689 break;
2690
2691 default:
2692 LOG(FATAL) << "Unexpected type conversion from " << input_type
2693 << " to " << result_type;
2694 }
2695 break;
2696
Roland Levillaindff1f282014-11-05 14:15:05 +00002697 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002698 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002699 case Primitive::kPrimBoolean:
2700 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002701 case Primitive::kPrimByte:
2702 case Primitive::kPrimShort:
2703 case Primitive::kPrimInt:
2704 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002705 // Processing a Dex `int-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002706 if (in.IsRegister()) {
2707 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2708 } else if (in.IsConstant()) {
2709 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2710 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002711 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002712 } else {
2713 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2714 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2715 }
Roland Levillaincff13742014-11-17 14:32:17 +00002716 break;
2717
2718 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002719 // Processing a Dex `long-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002720 if (in.IsRegister()) {
2721 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2722 } else if (in.IsConstant()) {
2723 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2724 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002725 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002726 } else {
2727 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2728 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2729 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002730 break;
2731
Roland Levillaincff13742014-11-17 14:32:17 +00002732 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002733 // Processing a Dex `double-to-float' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002734 if (in.IsFpuRegister()) {
2735 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2736 } else if (in.IsConstant()) {
2737 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2738 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002739 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002740 } else {
2741 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2742 Address(CpuRegister(RSP), in.GetStackIndex()));
2743 }
Roland Levillaincff13742014-11-17 14:32:17 +00002744 break;
2745
2746 default:
2747 LOG(FATAL) << "Unexpected type conversion from " << input_type
2748 << " to " << result_type;
2749 };
2750 break;
2751
Roland Levillaindff1f282014-11-05 14:15:05 +00002752 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002753 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002754 case Primitive::kPrimBoolean:
2755 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002756 case Primitive::kPrimByte:
2757 case Primitive::kPrimShort:
2758 case Primitive::kPrimInt:
2759 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002760 // Processing a Dex `int-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002761 if (in.IsRegister()) {
2762 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2763 } else if (in.IsConstant()) {
2764 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2765 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002766 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002767 } else {
2768 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2769 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2770 }
Roland Levillaincff13742014-11-17 14:32:17 +00002771 break;
2772
2773 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002774 // Processing a Dex `long-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002775 if (in.IsRegister()) {
2776 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2777 } else if (in.IsConstant()) {
2778 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2779 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002780 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002781 } else {
2782 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
2783 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2784 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002785 break;
2786
Roland Levillaincff13742014-11-17 14:32:17 +00002787 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002788 // Processing a Dex `float-to-double' instruction.
Mark Mendell40741f32015-04-20 22:10:34 -04002789 if (in.IsFpuRegister()) {
2790 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2791 } else if (in.IsConstant()) {
2792 float v = in.GetConstant()->AsFloatConstant()->GetValue();
2793 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002794 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002795 } else {
2796 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
2797 Address(CpuRegister(RSP), in.GetStackIndex()));
2798 }
Roland Levillaincff13742014-11-17 14:32:17 +00002799 break;
2800
2801 default:
2802 LOG(FATAL) << "Unexpected type conversion from " << input_type
2803 << " to " << result_type;
2804 };
Roland Levillaindff1f282014-11-05 14:15:05 +00002805 break;
2806
2807 default:
2808 LOG(FATAL) << "Unexpected type conversion from " << input_type
2809 << " to " << result_type;
2810 }
2811}
2812
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002813void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002814 LocationSummary* locations =
2815 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002816 switch (add->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002817 case Primitive::kPrimInt: {
2818 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002819 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2820 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002821 break;
2822 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002823
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002824 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002825 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05002826 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04002827 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05002828 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002829 break;
2830 }
2831
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002832 case Primitive::kPrimDouble:
2833 case Primitive::kPrimFloat: {
2834 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002835 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002836 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002837 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002838 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002839
2840 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002841 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002842 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002843}
2844
2845void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
2846 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002847 Location first = locations->InAt(0);
2848 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002849 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01002850
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002851 switch (add->GetResultType()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002852 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002853 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002854 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2855 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002856 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2857 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002858 } else {
2859 __ leal(out.AsRegister<CpuRegister>(), Address(
2860 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2861 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002862 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002863 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2864 __ addl(out.AsRegister<CpuRegister>(),
2865 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
2866 } else {
2867 __ leal(out.AsRegister<CpuRegister>(), Address(
2868 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
2869 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002870 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00002871 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002872 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002873 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002874 break;
2875 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002876
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002877 case Primitive::kPrimLong: {
Mark Mendell09b84632015-02-13 17:48:38 -05002878 if (second.IsRegister()) {
2879 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2880 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002881 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2882 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05002883 } else {
2884 __ leaq(out.AsRegister<CpuRegister>(), Address(
2885 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
2886 }
2887 } else {
2888 DCHECK(second.IsConstant());
2889 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2890 int32_t int32_value = Low32Bits(value);
2891 DCHECK_EQ(int32_value, value);
2892 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2893 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
2894 } else {
2895 __ leaq(out.AsRegister<CpuRegister>(), Address(
2896 first.AsRegister<CpuRegister>(), int32_value));
2897 }
2898 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002899 break;
2900 }
2901
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002902 case Primitive::kPrimFloat: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002903 if (second.IsFpuRegister()) {
2904 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2905 } else if (second.IsConstant()) {
2906 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002907 codegen_->LiteralFloatAddress(
2908 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002909 } else {
2910 DCHECK(second.IsStackSlot());
2911 __ addss(first.AsFpuRegister<XmmRegister>(),
2912 Address(CpuRegister(RSP), second.GetStackIndex()));
2913 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002914 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002915 }
2916
2917 case Primitive::kPrimDouble: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04002918 if (second.IsFpuRegister()) {
2919 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2920 } else if (second.IsConstant()) {
2921 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00002922 codegen_->LiteralDoubleAddress(
2923 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04002924 } else {
2925 DCHECK(second.IsDoubleStackSlot());
2926 __ addsd(first.AsFpuRegister<XmmRegister>(),
2927 Address(CpuRegister(RSP), second.GetStackIndex()));
2928 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002929 break;
2930 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002931
2932 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002933 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002934 }
2935}
2936
2937void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002938 LocationSummary* locations =
2939 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002940 switch (sub->GetResultType()) {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002941 case Primitive::kPrimInt: {
2942 locations->SetInAt(0, Location::RequiresRegister());
2943 locations->SetInAt(1, Location::Any());
2944 locations->SetOut(Location::SameAsFirstInput());
2945 break;
2946 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002947 case Primitive::kPrimLong: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002948 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04002949 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00002950 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002951 break;
2952 }
Calin Juravle11351682014-10-23 15:38:15 +01002953 case Primitive::kPrimFloat:
2954 case Primitive::kPrimDouble: {
2955 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04002956 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01002957 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002958 break;
Calin Juravle11351682014-10-23 15:38:15 +01002959 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002960 default:
Calin Juravle11351682014-10-23 15:38:15 +01002961 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002962 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002963}
2964
// Emits the subtraction selected by LocationsBuilderX86_64::VisitSub.
// Every case is two-address: the first input is also the output (DCHECKed
// below), so only the second operand's location varies.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Second operand may be a register, an immediate or a stack slot.
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsConstant()) {
        // The locations builder guaranteed the constant fits in 32 bits
        // (subq has no 64-bit immediate form).
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Second operand may be an XMM register, a literal-pool constant or
      // a stack slot.
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3027
Calin Juravle34bacdf2014-10-07 20:23:36 +01003028void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3029 LocationSummary* locations =
3030 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3031 switch (mul->GetResultType()) {
3032 case Primitive::kPrimInt: {
3033 locations->SetInAt(0, Location::RequiresRegister());
3034 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003035 if (mul->InputAt(1)->IsIntConstant()) {
3036 // Can use 3 operand multiply.
3037 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3038 } else {
3039 locations->SetOut(Location::SameAsFirstInput());
3040 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003041 break;
3042 }
3043 case Primitive::kPrimLong: {
3044 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003045 locations->SetInAt(1, Location::Any());
3046 if (mul->InputAt(1)->IsLongConstant() &&
3047 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003048 // Can use 3 operand multiply.
3049 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3050 } else {
3051 locations->SetOut(Location::SameAsFirstInput());
3052 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003053 break;
3054 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003055 case Primitive::kPrimFloat:
3056 case Primitive::kPrimDouble: {
3057 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003058 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003059 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003060 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003061 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003062
3063 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003064 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003065 }
3066}
3067
// Emits the multiplication selected by LocationsBuilderX86_64::VisitMul.
// For constant right-hand sides the three-operand imul form is used, in
// which case the output register may differ from the first input; all
// other forms are two-address (output aliases the first input).
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case Primitive::kPrimLong: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand imulq with a 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Second operand may be an XMM register, a literal-pool constant or
      // a stack slot.
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3151
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003152void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3153 uint32_t stack_adjustment, bool is_float) {
3154 if (source.IsStackSlot()) {
3155 DCHECK(is_float);
3156 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3157 } else if (source.IsDoubleStackSlot()) {
3158 DCHECK(!is_float);
3159 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3160 } else {
3161 // Write the value to the temporary location on the stack and load to FP stack.
3162 if (is_float) {
3163 Location stack_temp = Location::StackSlot(temp_offset);
3164 codegen_->Move(stack_temp, source);
3165 __ flds(Address(CpuRegister(RSP), temp_offset));
3166 } else {
3167 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3168 codegen_->Move(stack_temp, source);
3169 __ fldl(Address(CpuRegister(RSP), temp_offset));
3170 }
3171 }
3172}
3173
// Emits a floating-point remainder using the x87 fprem instruction (SSE has
// no remainder), then moves the result back into the SSE/XMM domain.
// Note: clobbers RAX (reserved as a temp by LocationsBuilderX86_64::VisitRem)
// via fstsw, and temporarily adjusts RSP for two scratch elements.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  Primitive::Type type = rem->GetResultType();
  bool is_float = type == Primitive::kPrimFloat;
  size_t elem_size = Primitive::ComponentSize(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor is pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize: fprem only performs partial
  // argument reduction and may need to be reissued.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3226
// Fast path for integral div/rem when the divisor is the constant +1 or -1:
// the remainder is always 0, and the quotient is the numerator (negated for
// a divisor of -1). No idiv is emitted.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt: {
      if (instruction->IsRem()) {
        // x % (+/-1) == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          __ negl(output_register);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (instruction->IsRem()) {
        // 32-bit xor zero-extends, clearing the full 64-bit register.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
3269
// Fast path for integral division by a power-of-two constant (positive or
// negative, |imm| >= 2). Emits the round-toward-zero correction
//   tmp = (numerator < 0) ? numerator + (abs_imm - 1) : numerator
// via lea/test/cmov, then an arithmetic shift, negating if the divisor is
// negative. Only valid for HDiv (the rem analogue is handled elsewhere).
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // tmp = numerator + (abs_imm - 1); keep plain numerator if it is >= 0.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // 64-bit displacement may not fit in lea; materialize abs_imm - 1 and add.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3313
// Integral div/rem by an arbitrary constant (|imm| >= 2, not a power of two
// for div): replaces idiv with a multiply by a precomputed "magic" number
// plus shift/sign corrections (CalculateMagicAndShiftForDivRem). Relies on
// imul pinning its results to RDX:RAX; the register-allocation choices
// (out/temps depending on div vs rem) are made in VisitDiv/VisitRem and
// asserted below.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Temp used to preserve the numerator across the imul that clobbers RAX/RDX.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Sign-compensate the high half when imm and magic have opposite signs.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX (= out).
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in RDX (= out).
      __ movq(rax, numerator);

      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        // Immediate does not fit in 32 bits; use the constant area.
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3424
Calin Juravlebacfec32014-11-14 15:54:36 +00003425void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3426 DCHECK(instruction->IsDiv() || instruction->IsRem());
3427 Primitive::Type type = instruction->GetResultType();
3428 DCHECK(type == Primitive::kPrimInt || Primitive::kPrimLong);
3429
3430 bool is_div = instruction->IsDiv();
3431 LocationSummary* locations = instruction->GetLocations();
3432
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003433 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3434 Location second = locations->InAt(1);
Calin Juravlebacfec32014-11-14 15:54:36 +00003435
Roland Levillain271ab9c2014-11-27 15:23:57 +00003436 DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003437 DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());
Calin Juravlebacfec32014-11-14 15:54:36 +00003438
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003439 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003440 int64_t imm = Int64FromConstant(second.GetConstant());
Calin Juravlebacfec32014-11-14 15:54:36 +00003441
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003442 if (imm == 0) {
3443 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3444 } else if (imm == 1 || imm == -1) {
3445 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003446 } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003447 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003448 } else {
3449 DCHECK(imm <= -2 || imm >= 2);
3450 GenerateDivRemWithAnyConstant(instruction);
3451 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003452 } else {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003453 SlowPathCode* slow_path =
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003454 new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86_64(
David Srbecky9cd6d372016-02-09 15:24:47 +00003455 instruction, out.AsRegister(), type, is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003456 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003457
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003458 CpuRegister second_reg = second.AsRegister<CpuRegister>();
3459 // 0x80000000(00000000)/-1 triggers an arithmetic exception!
3460 // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
3461 // so it's safe to just use negl instead of more complex comparisons.
3462 if (type == Primitive::kPrimInt) {
3463 __ cmpl(second_reg, Immediate(-1));
3464 __ j(kEqual, slow_path->GetEntryLabel());
3465 // edx:eax <- sign-extended of eax
3466 __ cdq();
3467 // eax = quotient, edx = remainder
3468 __ idivl(second_reg);
3469 } else {
3470 __ cmpq(second_reg, Immediate(-1));
3471 __ j(kEqual, slow_path->GetEntryLabel());
3472 // rdx:rax <- sign-extended of rax
3473 __ cqo();
3474 // rax = quotient, rdx = remainder
3475 __ idivq(second_reg);
3476 }
3477 __ Bind(slow_path->GetExitLabel());
3478 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003479}
3480
Calin Juravle7c4954d2014-10-28 16:57:40 +00003481void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3482 LocationSummary* locations =
3483 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3484 switch (div->GetResultType()) {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003485 case Primitive::kPrimInt:
3486 case Primitive::kPrimLong: {
Calin Juravled0d48522014-11-04 16:40:20 +00003487 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003488 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003489 locations->SetOut(Location::SameAsFirstInput());
3490 // Intel uses edx:eax as the dividend.
3491 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003492 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3493 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3494 // output and request another temp.
3495 if (div->InputAt(1)->IsConstant()) {
3496 locations->AddTemp(Location::RequiresRegister());
3497 }
Calin Juravled0d48522014-11-04 16:40:20 +00003498 break;
3499 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003500
Calin Juravle7c4954d2014-10-28 16:57:40 +00003501 case Primitive::kPrimFloat:
3502 case Primitive::kPrimDouble: {
3503 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003504 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003505 locations->SetOut(Location::SameAsFirstInput());
3506 break;
3507 }
3508
3509 default:
3510 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3511 }
3512}
3513
// Emits the division selected by LocationsBuilderX86_64::VisitDiv.
// Integral cases defer to GenerateDivRemIntegral; floating-point cases
// are two-address divss/divsd with the second operand in a register,
// the literal pool, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(div);
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3562
// Register-allocation constraints for HRem (the % operator).
// Integral rem follows the x86-64 idiv convention: the dividend lives in
// RDX:RAX and the remainder is produced in RDX. Floating-point rem is
// lowered by GenerateRemFP (defined elsewhere), which only needs an FP
// output and RAX as a scratch register.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx
      locations->SetOut(Location::RegisterLocation(RDX));
      // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
      // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
      // output and request another temp.
      // (The temp is only needed on the rem-by-constant path, which rewrites
      // the remainder as a multiply sequence.)
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // FP rem accepts its operands anywhere; GenerateRemFP materializes
      // them itself and uses RAX as a temporary.
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3597
3598void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
3599 Primitive::Type type = rem->GetResultType();
3600 switch (type) {
3601 case Primitive::kPrimInt:
3602 case Primitive::kPrimLong: {
3603 GenerateDivRemIntegral(rem);
3604 break;
3605 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003606 case Primitive::kPrimFloat:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003607 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003608 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003609 break;
3610 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003611 default:
3612 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3613 }
3614}
3615
// Register-allocation constraints for HDivZeroCheck. The divisor can be
// checked from a register, a stack slot, or a constant, so any location
// is acceptable. The throwing-slow-path summary is built by the codegen
// helper so caller-save bookkeeping matches other implicit throws.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
3620
// Emits the divide-by-zero check: compares the divisor against zero and
// branches to a slow path when it is zero. For a constant divisor the
// check is folded at compile time — a constant zero becomes an
// unconditional jump to the slow path, a constant non-zero emits nothing.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // All sub-int integral types are checked with 32-bit compares.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        // test reg, reg sets ZF iff the divisor is zero.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Statically known zero divisor: always take the slow path.
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // Same shape as the 32-bit case, with 64-bit compares.
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
3668
Calin Juravle9aec02f2014-11-18 23:06:35 +00003669void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
3670 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3671
3672 LocationSummary* locations =
3673 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3674
3675 switch (op->GetResultType()) {
3676 case Primitive::kPrimInt:
3677 case Primitive::kPrimLong: {
3678 locations->SetInAt(0, Location::RequiresRegister());
3679 // The shift count needs to be in CL.
3680 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
3681 locations->SetOut(Location::SameAsFirstInput());
3682 break;
3683 }
3684 default:
3685 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
3686 }
3687}
3688
// Emits code for HShl/HShr/HUShr. The operation is performed in place on
// the first input's register (shll/shlq for <<, sarl/sarq for arithmetic
// >>, shrl/shrq for logical >>>). Variable counts come from CL; constant
// counts are masked to the Java-mandated 5 (int) or 6 (long) bits, which
// also matches the hardware's own masking behavior.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        // Variable count: the register is RCX per the location summary.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        // Constant count, masked to [0, 31].
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Constant count, masked to [0, 63].
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
3746
// Register-allocation constraints for HRor (rotate right). Mirrors the
// shift constraints: variable rotate counts must be in CL (RCX), the
// result overwrites the first input.
void LocationsBuilderX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
3765
// Emits rorl/rorq for HRor, in place on the first input's register.
// Variable counts come from CL; constant counts are masked to 5 (int)
// or 6 (long) bits, matching the shift lowering above.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case Primitive::kPrimLong:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
3795
// Shl shares its register constraints with the other shift operations.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
3799
// Shl shares its code generation with the other shift operations.
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
3803
// Shr (arithmetic right shift) shares constraints with the other shifts.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
3807
// Shr (arithmetic right shift) shares codegen with the other shifts.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
3811
// UShr (logical right shift) shares constraints with the other shifts.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
3815
// UShr (logical right shift) shares codegen with the other shifts.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
3819
// Register-allocation constraints for HNewInstance. Allocation is a
// runtime call, so inputs are pinned to the runtime calling-convention
// registers and the result arrives in RAX. The String special case calls
// StringFactory instead and only needs the method register as a temp.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(Location::RegisterLocation(RAX));
}
3832
// Emits the runtime call that allocates a new object. Strings go through
// the NewEmptyString entry point, loaded thread-locally via the GS
// segment; everything else uses the instruction's chosen entrypoint.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    // The call must be immediately followed by the PC record so stack maps
    // attach to the right native PC.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
3849
// Register-allocation constraints for HNewArray: a runtime call taking
// the type index (loaded into the first convention register, reserved
// here as a temp), plus two inputs in the next convention registers.
// The allocated array is returned in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
3859
// Emits the runtime call that allocates a new array. The type index is
// materialized into the first argument register before the call.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
                           instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();

  DCHECK(!codegen_->IsLeafMethod());
}
3871
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003872void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003873 LocationSummary* locations =
3874 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003875 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3876 if (location.IsStackSlot()) {
3877 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3878 } else if (location.IsDoubleStackSlot()) {
3879 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3880 }
3881 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003882}
3883
// No code to emit: the locations builder already exposed the parameter's
// existing location, so the value is simply read from where it lives.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
3888
// HCurrentMethod is pinned to the fixed register that holds the ArtMethod*
// (kMethodRegisterArgument) throughout the method.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
3894
// No code to emit: the current ArtMethod* already lives in the register
// the locations builder selected.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
3899
// Register-allocation constraints for HClassTableGet: the Class reference
// comes in a register and the fetched method pointer goes to a register.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
3906
// Loads a method pointer out of a class's dispatch tables.
// kVTable: one load from the vtable embedded directly in the Class object.
// Otherwise (IMT): two loads — first the class's IMT pointer, then the
// entry at the computed offset within that table. The output register is
// deliberately reused as the intermediate for the second load.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
3924
// Register-allocation constraints for HNot: the one-operand x86 NOT works
// in place, so the output is constrained to alias the input.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
3931
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003932void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
3933 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003934 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3935 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003936 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00003937 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003938 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003939 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003940 break;
3941
3942 case Primitive::kPrimLong:
Roland Levillain271ab9c2014-11-27 15:23:57 +00003943 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01003944 break;
3945
3946 default:
3947 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
3948 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003949}
3950
// Register-allocation constraints for HBooleanNot: computed in place,
// so the output aliases the input register.
void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
3957
3958void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01003959 LocationSummary* locations = bool_not->GetLocations();
3960 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
3961 locations->Out().AsRegister<CpuRegister>().AsRegister());
3962 Location out = locations->Out();
3963 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
3964}
3965
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003966void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003967 LocationSummary* locations =
3968 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01003969 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003970 locations->SetInAt(i, Location::Any());
3971 }
3972 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003973}
3974
// Phis never reach code generation: they are resolved earlier by the
// register allocator, so hitting this visitor is a compiler bug.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
3978
// Emits whatever fence (possibly none) the given barrier kind requires
// on x86-64.
void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
  /*
   * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
   * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
   * For those cases, all we need to ensure is that there is a scheduling barrier in place.
   */
  switch (kind) {
    case MemBarrierKind::kAnyAny: {
      MemoryFence();
      break;
    }
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kStoreStore: {
      // nop
      break;
    }
    case MemBarrierKind::kNTStoreStore:
      // Non-Temporal Store/Store needs an explicit fence.
      MemoryFence(/* non-temporal */ true);
      break;
  }
}
4002
// Register-allocation constraints shared by instance and static field
// gets. Reference loads under a non-Baker read barrier need a slow-path
// call summary, and their output must not alias the base object so the
// barrier can still see the object's location.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4028
// Emits the load for an instance or static field get. InAt(0) is the base
// object (presumably the declaring Class for statics — confirm with the
// Visit*FieldGet callers outside this chunk). Handles the width-specific
// mov, the implicit null check, read barriers for reference fields, and
// the LoadAny barrier for volatile fields.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      // Zero-extending byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extending byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extending 16-bit load (Java char is unsigned).
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimNot: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimFloat: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (field_type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (field_type == Primitive::kPrimNot) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4127
// Register-allocation constraints shared by instance and static field
// sets. Volatile stores must be a single instruction, so constants wider
// than 32 bits are excluded from the constant option; stores that need a
// GC write barrier (or reference poisoning) reserve temp registers.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4164
// Emits the store for an instance or static field set.
// `field_info` supplies the field's type, byte offset and volatility;
// `value_can_be_null` is forwarded to the GC card marking so it can skip
// the null test on the stored value when the compiler proved it non-null.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  // Volatile semantics: order all prior memory accesses before the store.
  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set on the paths below where the store helper itself took care of
  // recording the implicit null check (MoveInt64ToAddress is passed
  // `instruction` for that purpose), so we do not record it twice.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      if (value.IsConstant()) {
        // Narrowing to int8_t keeps only the low byte of the constant.
        int8_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movb(Address(base, offset), Immediate(v));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      if (value.IsConstant()) {
        // Narrowing to int16_t keeps only the low 16 bits of the constant.
        int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movw(Address(base, offset), Immediate(v));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == Primitive::kPrimNot` implies `v == 0`.
        DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
          // Poison the reference in a temp so the value register is preserved.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        // A 64-bit immediate store may need two 32-bit moves; the helper
        // handles the split and the implicit null check recording.
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float as a 32-bit integer.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double; may be split into two
        // 32-bit moves by the helper (see kPrimLong above).
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  // Reference stores into the heap must dirty the GC card of `base`.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  // Volatile semantics: order the store before all subsequent accesses.
  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4287
4288void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4289 HandleFieldSet(instruction, instruction->GetFieldInfo());
4290}
4291
4292void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004293 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004294}
4295
// Location setup for instance field loads is shared with static field loads.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4299
4300void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004301 HandleFieldGet(instruction, instruction->GetFieldInfo());
4302}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004303
// Location setup for static field loads is shared with instance field loads.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004307
Calin Juravle52c48962014-12-16 17:02:57 +00004308void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4309 HandleFieldGet(instruction, instruction->GetFieldInfo());
4310}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004311
Calin Juravle52c48962014-12-16 17:02:57 +00004312void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4313 HandleFieldSet(instruction, instruction->GetFieldInfo());
4314}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004315
Calin Juravle52c48962014-12-16 17:02:57 +00004316void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004317 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004318}
4319
Calin Juravlee460d1d2015-09-29 04:52:17 +01004320void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4321 HUnresolvedInstanceFieldGet* instruction) {
4322 FieldAccessCallingConventionX86_64 calling_convention;
4323 codegen_->CreateUnresolvedFieldLocationSummary(
4324 instruction, instruction->GetFieldType(), calling_convention);
4325}
4326
4327void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4328 HUnresolvedInstanceFieldGet* instruction) {
4329 FieldAccessCallingConventionX86_64 calling_convention;
4330 codegen_->GenerateUnresolvedFieldAccess(instruction,
4331 instruction->GetFieldType(),
4332 instruction->GetFieldIndex(),
4333 instruction->GetDexPc(),
4334 calling_convention);
4335}
4336
4337void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4338 HUnresolvedInstanceFieldSet* instruction) {
4339 FieldAccessCallingConventionX86_64 calling_convention;
4340 codegen_->CreateUnresolvedFieldLocationSummary(
4341 instruction, instruction->GetFieldType(), calling_convention);
4342}
4343
4344void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4345 HUnresolvedInstanceFieldSet* instruction) {
4346 FieldAccessCallingConventionX86_64 calling_convention;
4347 codegen_->GenerateUnresolvedFieldAccess(instruction,
4348 instruction->GetFieldType(),
4349 instruction->GetFieldIndex(),
4350 instruction->GetDexPc(),
4351 calling_convention);
4352}
4353
4354void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4355 HUnresolvedStaticFieldGet* instruction) {
4356 FieldAccessCallingConventionX86_64 calling_convention;
4357 codegen_->CreateUnresolvedFieldLocationSummary(
4358 instruction, instruction->GetFieldType(), calling_convention);
4359}
4360
4361void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4362 HUnresolvedStaticFieldGet* instruction) {
4363 FieldAccessCallingConventionX86_64 calling_convention;
4364 codegen_->GenerateUnresolvedFieldAccess(instruction,
4365 instruction->GetFieldType(),
4366 instruction->GetFieldIndex(),
4367 instruction->GetDexPc(),
4368 calling_convention);
4369}
4370
4371void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4372 HUnresolvedStaticFieldSet* instruction) {
4373 FieldAccessCallingConventionX86_64 calling_convention;
4374 codegen_->CreateUnresolvedFieldLocationSummary(
4375 instruction, instruction->GetFieldType(), calling_convention);
4376}
4377
4378void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4379 HUnresolvedStaticFieldSet* instruction) {
4380 FieldAccessCallingConventionX86_64 calling_convention;
4381 codegen_->GenerateUnresolvedFieldAccess(instruction,
4382 instruction->GetFieldType(),
4383 instruction->GetFieldIndex(),
4384 instruction->GetDexPc(),
4385 calling_convention);
4386}
4387
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004388void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004389 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4390 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4391 ? Location::RequiresRegister()
4392 : Location::Any();
4393 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004394}
4395
Calin Juravle2ae48182016-03-16 14:05:09 +00004396void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4397 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004398 return;
4399 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004400 LocationSummary* locations = instruction->GetLocations();
4401 Location obj = locations->InAt(0);
4402
4403 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004404 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004405}
4406
// Emits an explicit null check: compares the object against null and jumps
// to a NullCheckSlowPathX86_64 (which throws) when it is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg,reg sets ZF when the register holds null (zero).
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant input must be the null constant: unconditionally throw.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
4426
// Delegates to the codegen, which picks the implicit or explicit scheme.
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
4430
// Sets up register locations for an array element load. Reference loads
// with read barriers may need a slow path call.
void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0: the array; input 1: the index (register or constant).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps for an object array get when read barriers
    // are enabled: we do not want the move to overwrite the array's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4455
// Emits the load of an array element, dispatching on the element type.
// Sub-int types use zero-extension (boolean, char) or sign-extension
// (byte, short); references may require a read barrier.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    case Primitive::kPrimBoolean: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      // Zero-extending byte load.
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimByte: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      // Sign-extending byte load.
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case Primitive::kPrimShort: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      // Sign-extending 16-bit load.
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case Primitive::kPrimChar: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      // Zero-extending 16-bit load (Java char is unsigned).
      __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case Primitive::kPrimInt: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == Primitive::kPrimNot) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
4556
// Sets up register locations for an array element store. A store that may
// need a runtime type check gets a slow path; reference stores get two
// temps for the write barrier (and possibly reference poisoning).
void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  // Input 0: array; input 1: index; input 2: the value to store.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
  }

  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4584
4585void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
4586 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004587 Location array_loc = locations->InAt(0);
4588 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004589 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004590 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004591 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004592 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004593 bool needs_write_barrier =
4594 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004595 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4596 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4597 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004598
4599 switch (value_type) {
4600 case Primitive::kPrimBoolean:
4601 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004602 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004603 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004604 if (value.IsRegister()) {
4605 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004606 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004607 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004608 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004609 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004610 break;
4611 }
4612
4613 case Primitive::kPrimShort:
4614 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004615 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004616 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004617 if (value.IsRegister()) {
4618 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004619 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004620 DCHECK(value.IsConstant()) << value;
4621 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004622 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004623 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004624 break;
4625 }
4626
4627 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004628 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004629 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004630
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004631 if (!value.IsRegister()) {
4632 // Just setting null.
4633 DCHECK(instruction->InputAt(2)->IsNullConstant());
4634 DCHECK(value.IsConstant()) << value;
4635 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00004636 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004637 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004638 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004639 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004640 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004641
4642 DCHECK(needs_write_barrier);
4643 CpuRegister register_value = value.AsRegister<CpuRegister>();
Roland Levillain16d9f942016-08-25 17:27:56 +01004644 // We cannot use a NearLabel for `done`, as its range may be too
4645 // short when Baker read barriers are enabled.
4646 Label done;
4647 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004648 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01004649 Location temp_loc = locations->GetTemp(0);
4650 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004651 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004652 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86_64(instruction);
4653 codegen_->AddSlowPath(slow_path);
4654 if (instruction->GetValueCanBeNull()) {
4655 __ testl(register_value, register_value);
4656 __ j(kNotEqual, &not_null);
4657 __ movl(address, Immediate(0));
4658 codegen_->MaybeRecordImplicitNullCheck(instruction);
4659 __ jmp(&done);
4660 __ Bind(&not_null);
4661 }
4662
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004663 // Note that when Baker read barriers are enabled, the type
4664 // checks are performed without read barriers. This is fine,
4665 // even in the case where a class object is in the from-space
4666 // after the flip, as a comparison involving such a type would
4667 // not produce a false positive; it may of course produce a
4668 // false negative, in which case we would take the ArraySet
4669 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01004670
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004671 // /* HeapReference<Class> */ temp = array->klass_
4672 __ movl(temp, Address(array, class_offset));
4673 codegen_->MaybeRecordImplicitNullCheck(instruction);
4674 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01004675
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004676 // /* HeapReference<Class> */ temp = temp->component_type_
4677 __ movl(temp, Address(temp, component_offset));
4678 // If heap poisoning is enabled, no need to unpoison `temp`
4679 // nor the object reference in `register_value->klass`, as
4680 // we are comparing two poisoned references.
4681 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01004682
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004683 if (instruction->StaticTypeOfArrayIsObjectArray()) {
4684 __ j(kEqual, &do_put);
4685 // If heap poisoning is enabled, the `temp` reference has
4686 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00004687 __ MaybeUnpoisonHeapReference(temp);
4688
Roland Levillain9d6e1f82016-09-05 15:57:33 +01004689 // If heap poisoning is enabled, no need to unpoison the
4690 // heap reference loaded below, as it is only used for a
4691 // comparison with null.
4692 __ cmpl(Address(temp, super_offset), Immediate(0));
4693 __ j(kNotEqual, slow_path->GetEntryLabel());
4694 __ Bind(&do_put);
4695 } else {
4696 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004697 }
4698 }
4699
4700 if (kPoisonHeapReferences) {
4701 __ movl(temp, register_value);
4702 __ PoisonHeapReference(temp);
4703 __ movl(address, temp);
4704 } else {
4705 __ movl(address, register_value);
4706 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00004707 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004708 codegen_->MaybeRecordImplicitNullCheck(instruction);
4709 }
4710
4711 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
4712 codegen_->MarkGCCard(
4713 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
4714 __ Bind(&done);
4715
4716 if (slow_path != nullptr) {
4717 __ Bind(slow_path->GetExitLabel());
4718 }
4719
4720 break;
4721 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004722
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004723 case Primitive::kPrimInt: {
4724 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004725 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004726 if (value.IsRegister()) {
4727 __ movl(address, value.AsRegister<CpuRegister>());
4728 } else {
4729 DCHECK(value.IsConstant()) << value;
4730 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4731 __ movl(address, Immediate(v));
4732 }
4733 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004734 break;
4735 }
4736
4737 case Primitive::kPrimLong: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004738 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004739 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004740 if (value.IsRegister()) {
4741 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04004742 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004743 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004744 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004745 Address address_high =
4746 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04004747 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004748 }
4749 break;
4750 }
4751
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004752 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004753 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004754 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004755 if (value.IsFpuRegister()) {
4756 __ movss(address, value.AsFpuRegister<XmmRegister>());
4757 } else {
4758 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004759 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
Mark Mendellea5af682015-10-22 17:35:49 -04004760 __ movl(address, Immediate(v));
4761 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004762 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004763 break;
4764 }
4765
4766 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01004767 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004768 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04004769 if (value.IsFpuRegister()) {
4770 __ movsd(address, value.AsFpuRegister<XmmRegister>());
4771 codegen_->MaybeRecordImplicitNullCheck(instruction);
4772 } else {
4773 int64_t v =
4774 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01004775 Address address_high =
4776 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04004777 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
4778 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01004779 break;
4780 }
4781
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004782 case Primitive::kPrimVoid:
4783 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07004784 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004785 }
4786}
4787
4788void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004789 LocationSummary* locations =
4790 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004791 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04004792 if (!instruction->IsEmittedAtUseSite()) {
4793 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4794 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004795}
4796
4797void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04004798 if (instruction->IsEmittedAtUseSite()) {
4799 return;
4800 }
4801
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004802 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01004803 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00004804 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
4805 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004806 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004807 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004808}
4809
4810void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004811 RegisterSet caller_saves = RegisterSet::Empty();
4812 InvokeRuntimeCallingConvention calling_convention;
4813 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4814 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
4815 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05004816 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04004817 HInstruction* length = instruction->InputAt(1);
4818 if (!length->IsEmittedAtUseSite()) {
4819 locations->SetInAt(1, Location::RegisterOrConstant(length));
4820 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004821}
4822
// Emits the array bounds check: jumps to a throwing slow path when
// index is not in [0, length). Handles four shapes: constant/constant
// (resolved at compile time), register index vs constant length,
// index vs an in-memory array length (length emitted at use site),
// and index vs a materialized length.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: unconditionally take the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // The unsigned kAboveEqual compare also catches negative indices.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (index_loc.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
        __ cmpl(array_len, Immediate(value));
      } else {
        __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
      }
      // The length load above may fault; record it as the implicit null check
      // for the ArrayLength instruction.
      codegen_->MaybeRecordImplicitNullCheck(array_length);
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // length <= index (unsigned) means out of bounds.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
4871
// GC write barrier: marks the card covering `object` in the card table after
// a reference store, so the concurrent/generational GC rescans that region.
// `temp` and `card` are scratch registers; `value` is the stored reference.
// When `value_can_be_null` is set, a null store skips the card marking.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the card table base from the thread (via the GS segment).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // Index into the card table: card entry = base + (object >> kCardShift).
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Store the low byte of `card` into the entry; the runtime arranges for
  // this byte to be the dirty-card value (presumably — see CardTable).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
4891
// Parallel moves carry their operands in MoveOperands rather than in a
// LocationSummary (see ParallelMoveResolverX86_64::EmitMove below), so
// location building is intentionally unsupported.
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4895
// Delegates code emission for a parallel move to the move resolver, which
// sequences the individual moves (using TMP/exchanges where operands overlap).
void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
4899
// A suspend check only branches to a slow path; it needs no inputs or output
// and its slow path preserves no caller-save registers itself.
void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}
4905
// Emits a suspend check unless another instruction already covers it:
// loop headers are handled by their back edge, and an entry block followed
// by a goto is handled by the goto.
void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
4919
// Emits the thread-suspension test: compares the thread's flags word (via GS)
// against zero and branches to a (cached, per-instruction) slow path when the
// runtime requested a suspension. With a `successor` (back-edge case), the
// fast path jumps straight to the successor; otherwise execution falls
// through past the slow path's return label.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First emission for this check: create and register the slow path.
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    // Re-emission must target the same successor as the cached slow path.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
4947
// The resolver emits instructions through the owning code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
4951
// Emits a single (non-swapping) move from the pending move list, dispatching
// on the source kind (GPR, 32/64-bit stack slot, constant, FPU register) and
// then on the destination kind. Memory-to-memory moves go through the TMP
// scratch register; constants use the cheapest materialization available
// (xor for zero, Load*/Store* helpers for 64-bit values).
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // General-purpose register source.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot source.
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      // Memory-to-memory: stage through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot source.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      // Memory-to-memory: stage through TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    // Constant source: materialize by constant type.
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is shorter than mov-immediate for zeroing a register.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    // FPU register source.
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    }
  }
}
5049
// Swaps a 32-bit GPR with a 32-bit stack slot at RSP+mem, using TMP as the
// staging register (order matters: save the slot, store the reg, restore).
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5055
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005056void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005057 ScratchRegisterScope ensure_scratch(
5058 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5059
5060 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5061 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5062 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5063 Address(CpuRegister(RSP), mem2 + stack_offset));
5064 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5065 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5066 CpuRegister(ensure_scratch.GetRegister()));
5067}
5068
// Swaps two 64-bit GPRs through TMP (avoids the implicitly-locked xchg).
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5074
// Swaps a 64-bit GPR with a 64-bit stack slot at RSP+mem via TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5080
// Swaps two 64-bit stack slots. As in the 32-bit variant, a spilled scratch
// register shifts both slots by one word (the push moved RSP).
void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5093
// Swaps a 32-bit value between an XMM register and a stack slot: the slot's
// bits are staged in TMP and moved back into the XMM register with movd.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5099
// 64-bit counterpart of the XMM/stack-slot swap above (movq/movsd widths).
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5105
// Emits a swap for the move at `index`, dispatching on the (source kind,
// destination kind) pair to the Exchange32/Exchange64 helpers. Any pair not
// listed here is a fatal error.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // Swap two XMM registers through TMP (movd moves the 64-bit payload).
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5141
5142
// Frees up a core register for use as a scratch by pushing it on the stack.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5146
5147
// Restores a register previously saved by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5151
// Branches to `slow_path` unless the class in `class_reg` has reached the
// kStatusInitialized state; falls through (past the slow path's exit label)
// when the class is already initialized.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5160
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005161HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5162 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005163 switch (desired_class_load_kind) {
5164 case HLoadClass::LoadKind::kReferrersClass:
5165 break;
5166 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5167 DCHECK(!GetCompilerOptions().GetCompilePic());
5168 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5169 return HLoadClass::LoadKind::kBootImageLinkTimePcRelative;
5170 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5171 DCHECK(GetCompilerOptions().GetCompilePic());
5172 break;
5173 case HLoadClass::LoadKind::kBootImageAddress:
5174 break;
5175 case HLoadClass::LoadKind::kDexCacheAddress:
5176 DCHECK(Runtime::Current()->UseJitCompilation());
5177 break;
5178 case HLoadClass::LoadKind::kDexCachePcRelative:
5179 DCHECK(!Runtime::Current()->UseJitCompilation());
5180 break;
5181 case HLoadClass::LoadKind::kDexCacheViaMethod:
5182 break;
5183 }
5184 return desired_class_load_kind;
5185}
5186
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005187void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005188 if (cls->NeedsAccessCheck()) {
5189 InvokeRuntimeCallingConvention calling_convention;
5190 CodeGenerator::CreateLoadClassLocationSummary(
5191 cls,
5192 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
5193 Location::RegisterLocation(RAX),
5194 /* code_generator_supports_read_barrier */ true);
5195 return;
5196 }
5197
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005198 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5199 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005200 ? LocationSummary::kCallOnSlowPath
5201 : LocationSummary::kNoCall;
5202 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005203 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005204 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005205 }
5206
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005207 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
5208 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
5209 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
5210 locations->SetInAt(0, Location::RequiresRegister());
5211 }
5212 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005213}
5214
// Emits the code that materializes a java.lang.Class reference into the
// output register, using the strategy chosen by GetLoadKind(). Depending on
// the kind this is a PC-relative lea, an immediate, a dex-cache load (with
// read barrier support), or a runtime call when an access check is required.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Access-checked loads are delegated entirely to the runtime entrypoint;
    // no inline fast path is emitted.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are never collected, so no read barrier is needed
  // for them even when the compiler otherwise emits read barriers.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  bool generate_null_check = false;
  switch (cls->GetLoadKind()) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /*fixup_label*/nullptr,
          requires_read_barrier);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(!requires_read_barrier);
      // The displacement is a dummy value patched at link time; see RecordTypePatch.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!requires_read_barrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ movl(out, Immediate(address));  // Zero-extended.
      codegen_->RecordSimplePatch();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheAddress: {
      DCHECK_NE(cls->GetAddress(), 0u);
      // /* GcRoot<mirror::Class> */ out = *address
      if (IsUint<32>(cls->GetAddress())) {
        // The dex cache slot fits in a 32-bit absolute address.
        Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true);
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                address,
                                /*fixup_label*/nullptr,
                                requires_read_barrier);
      } else {
        // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address).
        // 64-bit address: load it into `out` first, then dereference.
        __ movq(out, Immediate(cls->GetAddress()));
        GenerateGcRootFieldLoad(cls,
                                out_loc,
                                Address(out, 0),
                                /*fixup_label*/nullptr,
                                requires_read_barrier);
      }
      // The cache slot may still be null if the class was not resolved yet.
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCachePcRelative: {
      uint32_t offset = cls->GetDexCacheElementOffset();
      Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod: {
      // /* GcRoot<mirror::Class>[] */ out =
      //        current_method.ptr_sized_fields_->dex_cache_resolved_types_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      __ movq(out,
              Address(current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
      // /* GcRoot<mirror::Class> */ out = out[type_index]
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
          /*fixup_label*/nullptr,
          requires_read_barrier);
      generate_null_check = !cls->IsInDexCache();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    // One slow path handles both the unresolved-class and the
    // not-yet-initialized-class cases.
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // The init check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5326
5327void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
5328 LocationSummary* locations =
5329 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
5330 locations->SetInAt(0, Location::RequiresRegister());
5331 if (check->HasUses()) {
5332 locations->SetOut(Location::SameAsFirstInput());
5333 }
5334}
5335
5336void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005337 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07005338 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00005339 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005340 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00005341 GenerateClassInitializationCheck(slow_path,
5342 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01005343}
5344
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005345HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
5346 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005347 switch (desired_string_load_kind) {
5348 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
5349 DCHECK(!GetCompilerOptions().GetCompilePic());
5350 // We prefer the always-available RIP-relative address for the x86-64 boot image.
5351 return HLoadString::LoadKind::kBootImageLinkTimePcRelative;
5352 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
5353 DCHECK(GetCompilerOptions().GetCompilePic());
5354 break;
5355 case HLoadString::LoadKind::kBootImageAddress:
5356 break;
5357 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01005358 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005359 break;
5360 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01005361 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005362 break;
5363 case HLoadString::LoadKind::kDexCacheViaMethod:
5364 break;
5365 }
5366 return desired_string_load_kind;
5367}
5368
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005369void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005370 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
5371 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray917d0162015-11-24 18:25:35 +00005372 : LocationSummary::kNoCall;
5373 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005374 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
5375 locations->SetInAt(0, Location::RequiresRegister());
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005376 locations->SetOut(Location::RegisterLocation(RAX));
5377 } else {
5378 locations->SetOut(Location::RequiresRegister());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005379 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005380}
5381
5382void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01005383 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005384 Location out_loc = locations->Out();
5385 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005386
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005387 switch (load->GetLoadKind()) {
5388 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005389 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
5390 codegen_->RecordStringPatch(load);
5391 return; // No dex cache slow path.
5392 }
5393 case HLoadString::LoadKind::kBootImageAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005394 DCHECK_NE(load->GetAddress(), 0u);
5395 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
5396 __ movl(out, Immediate(address)); // Zero-extended.
5397 codegen_->RecordSimplePatch();
5398 return; // No dex cache slow path.
5399 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005400 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005401 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005402 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005403
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005404 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworthabb341b2016-08-31 16:29:44 -07005405 InvokeRuntimeCallingConvention calling_convention;
5406 __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
5407 Immediate(load->GetStringIndex()));
5408 codegen_->InvokeRuntime(kQuickResolveString,
5409 load,
5410 load->GetDexPc());
5411 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00005412}
5413
David Brazdilcb1c0552015-08-04 16:22:25 +01005414static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005415 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005416 /* no_rip */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01005417}
5418
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005419void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
5420 LocationSummary* locations =
5421 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
5422 locations->SetOut(Location::RequiresRegister());
5423}
5424
5425void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005426 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
5427}
5428
5429void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
5430 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
5431}
5432
5433void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5434 __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005435}
5436
5437void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
5438 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005439 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005440 InvokeRuntimeCallingConvention calling_convention;
5441 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5442}
5443
5444void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01005445 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005446 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00005447}
5448
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005449static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
5450 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00005451 !kUseBakerReadBarrier &&
5452 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005453 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5454 type_check_kind == TypeCheckKind::kArrayObjectCheck);
5455}
5456
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005457void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005458 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005459 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01005460 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00005461 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005462 case TypeCheckKind::kExactCheck:
5463 case TypeCheckKind::kAbstractClassCheck:
5464 case TypeCheckKind::kClassHierarchyCheck:
5465 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005466 call_kind =
5467 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01005468 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005469 break;
5470 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005471 case TypeCheckKind::kUnresolvedCheck:
5472 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005473 call_kind = LocationSummary::kCallOnSlowPath;
5474 break;
5475 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005476
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005477 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01005478 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005479 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005480 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005481 locations->SetInAt(0, Location::RequiresRegister());
5482 locations->SetInAt(1, Location::Any());
5483 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
5484 locations->SetOut(Location::RequiresRegister());
5485 // When read barriers are enabled, we need a temporary register for
5486 // some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005487 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005488 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005489 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005490}
5491
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005492void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005493 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005494 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005495 Location obj_loc = locations->InAt(0);
5496 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005497 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005498 Location out_loc = locations->Out();
5499 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005500 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005501 locations->GetTemp(0) :
5502 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005503 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005504 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5505 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5506 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07005507 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005508 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005509
5510 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005511 // Avoid null check if we know obj is not null.
5512 if (instruction->MustDoNullCheck()) {
5513 __ testl(obj, obj);
5514 __ j(kEqual, &zero);
5515 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005516
Roland Levillain0d5a2812015-11-13 10:07:31 +00005517 // /* HeapReference<Class> */ out = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005518 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005519
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005520 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005521 case TypeCheckKind::kExactCheck: {
5522 if (cls.IsRegister()) {
5523 __ cmpl(out, cls.AsRegister<CpuRegister>());
5524 } else {
5525 DCHECK(cls.IsStackSlot()) << cls;
5526 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5527 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005528 if (zero.IsLinked()) {
5529 // Classes must be equal for the instanceof to succeed.
5530 __ j(kNotEqual, &zero);
5531 __ movl(out, Immediate(1));
5532 __ jmp(&done);
5533 } else {
5534 __ setcc(kEqual, out);
5535 // setcc only sets the low byte.
5536 __ andl(out, Immediate(1));
5537 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005538 break;
5539 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005540
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005541 case TypeCheckKind::kAbstractClassCheck: {
5542 // If the class is abstract, we eagerly fetch the super class of the
5543 // object to avoid doing a comparison we know will fail.
5544 NearLabel loop, success;
5545 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005546 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005547 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005548 __ testl(out, out);
5549 // If `out` is null, we use it for the result, and jump to `done`.
5550 __ j(kEqual, &done);
5551 if (cls.IsRegister()) {
5552 __ cmpl(out, cls.AsRegister<CpuRegister>());
5553 } else {
5554 DCHECK(cls.IsStackSlot()) << cls;
5555 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5556 }
5557 __ j(kNotEqual, &loop);
5558 __ movl(out, Immediate(1));
5559 if (zero.IsLinked()) {
5560 __ jmp(&done);
5561 }
5562 break;
5563 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005564
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005565 case TypeCheckKind::kClassHierarchyCheck: {
5566 // Walk over the class hierarchy to find a match.
5567 NearLabel loop, success;
5568 __ Bind(&loop);
5569 if (cls.IsRegister()) {
5570 __ cmpl(out, cls.AsRegister<CpuRegister>());
5571 } else {
5572 DCHECK(cls.IsStackSlot()) << cls;
5573 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5574 }
5575 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005576 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005577 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005578 __ testl(out, out);
5579 __ j(kNotEqual, &loop);
5580 // If `out` is null, we use it for the result, and jump to `done`.
5581 __ jmp(&done);
5582 __ Bind(&success);
5583 __ movl(out, Immediate(1));
5584 if (zero.IsLinked()) {
5585 __ jmp(&done);
5586 }
5587 break;
5588 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005589
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005590 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005591 // Do an exact check.
5592 NearLabel exact_check;
5593 if (cls.IsRegister()) {
5594 __ cmpl(out, cls.AsRegister<CpuRegister>());
5595 } else {
5596 DCHECK(cls.IsStackSlot()) << cls;
5597 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5598 }
5599 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005600 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005601 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005602 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005603 __ testl(out, out);
5604 // If `out` is null, we use it for the result, and jump to `done`.
5605 __ j(kEqual, &done);
5606 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
5607 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005608 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005609 __ movl(out, Immediate(1));
5610 __ jmp(&done);
5611 break;
5612 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005613
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005614 case TypeCheckKind::kArrayCheck: {
5615 if (cls.IsRegister()) {
5616 __ cmpl(out, cls.AsRegister<CpuRegister>());
5617 } else {
5618 DCHECK(cls.IsStackSlot()) << cls;
5619 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
5620 }
5621 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005622 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5623 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005624 codegen_->AddSlowPath(slow_path);
5625 __ j(kNotEqual, slow_path->GetEntryLabel());
5626 __ movl(out, Immediate(1));
5627 if (zero.IsLinked()) {
5628 __ jmp(&done);
5629 }
5630 break;
5631 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005632
Calin Juravle98893e12015-10-02 21:05:03 +01005633 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005634 case TypeCheckKind::kInterfaceCheck: {
5635 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005636 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00005637 // cases.
5638 //
5639 // We cannot directly call the InstanceofNonTrivial runtime
5640 // entry point without resorting to a type checking slow path
5641 // here (i.e. by calling InvokeRuntime directly), as it would
5642 // require to assign fixed registers for the inputs of this
5643 // HInstanceOf instruction (following the runtime calling
5644 // convention), which might be cluttered by the potential first
5645 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005646 //
5647 // TODO: Introduce a new runtime entry point taking the object
5648 // to test (instead of its class) as argument, and let it deal
5649 // with the read barrier issues. This will let us refactor this
5650 // case of the `switch` code as it was previously (with a direct
5651 // call to the runtime not using a type checking slow path).
5652 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005653 DCHECK(locations->OnlyCallsOnSlowPath());
5654 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5655 /* is_fatal */ false);
5656 codegen_->AddSlowPath(slow_path);
5657 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005658 if (zero.IsLinked()) {
5659 __ jmp(&done);
5660 }
5661 break;
5662 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005663 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005664
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005665 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005666 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005667 __ xorl(out, out);
5668 }
5669
5670 if (done.IsLinked()) {
5671 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01005672 }
5673
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005674 if (slow_path != nullptr) {
5675 __ Bind(slow_path->GetExitLabel());
5676 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00005677}
5678
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005679void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005680 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5681 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005682 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
5683 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005684 case TypeCheckKind::kExactCheck:
5685 case TypeCheckKind::kAbstractClassCheck:
5686 case TypeCheckKind::kClassHierarchyCheck:
5687 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005688 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
5689 LocationSummary::kCallOnSlowPath :
5690 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005691 break;
5692 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00005693 case TypeCheckKind::kUnresolvedCheck:
5694 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005695 call_kind = LocationSummary::kCallOnSlowPath;
5696 break;
5697 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005698 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
5699 locations->SetInAt(0, Location::RequiresRegister());
5700 locations->SetInAt(1, Location::Any());
5701 // Note that TypeCheckSlowPathX86_64 uses this "temp" register too.
5702 locations->AddTemp(Location::RequiresRegister());
5703 // When read barriers are enabled, we need an additional temporary
5704 // register for some cases.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005705 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005706 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005707 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005708}
5709
5710void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005711 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005712 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005713 Location obj_loc = locations->InAt(0);
5714 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005715 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005716 Location temp_loc = locations->GetTemp(0);
5717 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005718 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005719 locations->GetTemp(1) :
5720 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005721 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5722 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5723 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5724 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005725
Roland Levillain0d5a2812015-11-13 10:07:31 +00005726 bool is_type_check_slow_path_fatal =
5727 (type_check_kind == TypeCheckKind::kExactCheck ||
5728 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
5729 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
5730 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
5731 !instruction->CanThrowIntoCatchBlock();
5732 SlowPathCode* type_check_slow_path =
5733 new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction,
5734 is_type_check_slow_path_fatal);
5735 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005736
Roland Levillain0d5a2812015-11-13 10:07:31 +00005737 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005738 case TypeCheckKind::kExactCheck:
5739 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005740 NearLabel done;
5741 // Avoid null check if we know obj is not null.
5742 if (instruction->MustDoNullCheck()) {
5743 __ testl(obj, obj);
5744 __ j(kEqual, &done);
5745 }
5746
5747 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005748 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005749
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005750 if (cls.IsRegister()) {
5751 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5752 } else {
5753 DCHECK(cls.IsStackSlot()) << cls;
5754 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5755 }
5756 // Jump to slow path for throwing the exception or doing a
5757 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005758 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005759 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005760 break;
5761 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005762
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005763 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005764 NearLabel done;
5765 // Avoid null check if we know obj is not null.
5766 if (instruction->MustDoNullCheck()) {
5767 __ testl(obj, obj);
5768 __ j(kEqual, &done);
5769 }
5770
5771 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005772 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005773
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005774 // If the class is abstract, we eagerly fetch the super class of the
5775 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005776 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005777 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005778 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005779 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005780
5781 // If the class reference currently in `temp` is not null, jump
5782 // to the `compare_classes` label to compare it with the checked
5783 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005784 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005785 __ j(kNotEqual, &compare_classes);
5786 // Otherwise, jump to the slow path to throw the exception.
5787 //
5788 // But before, move back the object's class into `temp` before
5789 // going into the slow path, as it has been overwritten in the
5790 // meantime.
5791 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005792 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005793 __ jmp(type_check_slow_path->GetEntryLabel());
5794
5795 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005796 if (cls.IsRegister()) {
5797 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5798 } else {
5799 DCHECK(cls.IsStackSlot()) << cls;
5800 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5801 }
5802 __ j(kNotEqual, &loop);
Roland Levillain86503782016-02-11 19:07:30 +00005803 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005804 break;
5805 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005806
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005807 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005808 NearLabel done;
5809 // Avoid null check if we know obj is not null.
5810 if (instruction->MustDoNullCheck()) {
5811 __ testl(obj, obj);
5812 __ j(kEqual, &done);
5813 }
5814
5815 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005816 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005817
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005818 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005819 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005820 __ Bind(&loop);
5821 if (cls.IsRegister()) {
5822 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5823 } else {
5824 DCHECK(cls.IsStackSlot()) << cls;
5825 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5826 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005827 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005828
Roland Levillain0d5a2812015-11-13 10:07:31 +00005829 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005830 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005831
5832 // If the class reference currently in `temp` is not null, jump
5833 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005834 __ testl(temp, temp);
5835 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005836 // Otherwise, jump to the slow path to throw the exception.
5837 //
5838 // But before, move back the object's class into `temp` before
5839 // going into the slow path, as it has been overwritten in the
5840 // meantime.
5841 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005842 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005843 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005844 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005845 break;
5846 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005847
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005848 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00005849 // We cannot use a NearLabel here, as its range might be too
5850 // short in some cases when read barriers are enabled. This has
5851 // been observed for instance when the code emitted for this
5852 // case uses high x86-64 registers (R8-R15).
5853 Label done;
5854 // Avoid null check if we know obj is not null.
5855 if (instruction->MustDoNullCheck()) {
5856 __ testl(obj, obj);
5857 __ j(kEqual, &done);
5858 }
5859
5860 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005861 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005862
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005863 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005864 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01005865 if (cls.IsRegister()) {
5866 __ cmpl(temp, cls.AsRegister<CpuRegister>());
5867 } else {
5868 DCHECK(cls.IsStackSlot()) << cls;
5869 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
5870 }
5871 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005872
5873 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005874 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00005875 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005876
5877 // If the component type is not null (i.e. the object is indeed
5878 // an array), jump to label `check_non_primitive_component_type`
5879 // to further check that this component type is not a primitive
5880 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005881 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005882 __ j(kNotEqual, &check_non_primitive_component_type);
5883 // Otherwise, jump to the slow path to throw the exception.
5884 //
5885 // But before, move back the object's class into `temp` before
5886 // going into the slow path, as it has been overwritten in the
5887 // meantime.
5888 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005889 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005890 __ jmp(type_check_slow_path->GetEntryLabel());
5891
5892 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005893 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00005894 __ j(kEqual, &done);
5895 // Same comment as above regarding `temp` and the slow path.
5896 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005897 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005898 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005899 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005900 break;
5901 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005902
Calin Juravle98893e12015-10-02 21:05:03 +01005903 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005904 case TypeCheckKind::kInterfaceCheck:
Roland Levillain86503782016-02-11 19:07:30 +00005905 NearLabel done;
5906 // Avoid null check if we know obj is not null.
5907 if (instruction->MustDoNullCheck()) {
5908 __ testl(obj, obj);
5909 __ j(kEqual, &done);
5910 }
5911
5912 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00005913 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain86503782016-02-11 19:07:30 +00005914
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005915 // We always go into the type check slow path for the unresolved
5916 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005917 //
5918 // We cannot directly call the CheckCast runtime entry point
5919 // without resorting to a type checking slow path here (i.e. by
5920 // calling InvokeRuntime directly), as it would require to
5921 // assign fixed registers for the inputs of this HInstanceOf
5922 // instruction (following the runtime calling convention), which
5923 // might be cluttered by the potential first read barrier
5924 // emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005925 //
5926 // TODO: Introduce a new runtime entry point taking the object
5927 // to test (instead of its class) as argument, and let it deal
5928 // with the read barrier issues. This will let us refactor this
5929 // case of the `switch` code as it was previously (with a direct
5930 // call to the runtime not using a type checking slow path).
5931 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005932 __ jmp(type_check_slow_path->GetEntryLabel());
Roland Levillain86503782016-02-11 19:07:30 +00005933 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005934 break;
5935 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00005936
Roland Levillain0d5a2812015-11-13 10:07:31 +00005937 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00005938}
5939
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005940void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
5941 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005942 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005943 InvokeRuntimeCallingConvention calling_convention;
5944 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5945}
5946
5947void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01005948 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01005949 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01005950 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005951 if (instruction->IsEnter()) {
5952 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5953 } else {
5954 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5955 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00005956}
5957
// And, Or and Xor all share the same location constraints; see
// LocationsBuilderX86_64::HandleBitwiseOperation below.
void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
5961
5962void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5963 LocationSummary* locations =
5964 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5965 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
5966 || instruction->GetResultType() == Primitive::kPrimLong);
5967 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005968 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005969 locations->SetOut(Location::SameAsFirstInput());
5970}
5971
// Code generation for And, Or and Xor is shared; see
// InstructionCodeGeneratorX86_64::HandleBitwiseOperation below.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
5983
5984void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
5985 LocationSummary* locations = instruction->GetLocations();
5986 Location first = locations->InAt(0);
5987 Location second = locations->InAt(1);
5988 DCHECK(first.Equals(locations->Out()));
5989
5990 if (instruction->GetResultType() == Primitive::kPrimInt) {
5991 if (second.IsRegister()) {
5992 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005993 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005994 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005995 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005996 } else {
5997 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005998 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00005999 }
6000 } else if (second.IsConstant()) {
6001 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6002 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006003 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006004 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006005 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006006 } else {
6007 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006008 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006009 }
6010 } else {
6011 Address address(CpuRegister(RSP), second.GetStackIndex());
6012 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006013 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006014 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006015 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006016 } else {
6017 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006018 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006019 }
6020 }
6021 } else {
6022 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006023 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6024 bool second_is_constant = false;
6025 int64_t value = 0;
6026 if (second.IsConstant()) {
6027 second_is_constant = true;
6028 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006029 }
Mark Mendell40741f32015-04-20 22:10:34 -04006030 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006031
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006032 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006033 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006034 if (is_int32_value) {
6035 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6036 } else {
6037 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6038 }
6039 } else if (second.IsDoubleStackSlot()) {
6040 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006041 } else {
6042 __ andq(first_reg, second.AsRegister<CpuRegister>());
6043 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006044 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006045 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006046 if (is_int32_value) {
6047 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6048 } else {
6049 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6050 }
6051 } else if (second.IsDoubleStackSlot()) {
6052 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006053 } else {
6054 __ orq(first_reg, second.AsRegister<CpuRegister>());
6055 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006056 } else {
6057 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006058 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006059 if (is_int32_value) {
6060 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6061 } else {
6062 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6063 }
6064 } else if (second.IsDoubleStackSlot()) {
6065 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006066 } else {
6067 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6068 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006069 }
6070 }
6071}
6072
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006073void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6074 Location out,
6075 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006076 Location maybe_temp) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006077 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6078 if (kEmitCompilerReadBarrier) {
6079 if (kUseBakerReadBarrier) {
6080 // Load with fast path based Baker's read barrier.
6081 // /* HeapReference<Object> */ out = *(out + offset)
6082 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006083 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006084 } else {
6085 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006086 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006087 // in the following move operation, as we will need it for the
6088 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00006089 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006090 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006091 // /* HeapReference<Object> */ out = *(out + offset)
6092 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006093 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006094 }
6095 } else {
6096 // Plain load with no read barrier.
6097 // /* HeapReference<Object> */ out = *(out + offset)
6098 __ movl(out_reg, Address(out_reg, offset));
6099 __ MaybeUnpoisonHeapReference(out_reg);
6100 }
6101}
6102
6103void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6104 Location out,
6105 Location obj,
Vladimir Marko953437b2016-08-24 08:30:46 +00006106 uint32_t offset) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006107 CpuRegister out_reg = out.AsRegister<CpuRegister>();
6108 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
6109 if (kEmitCompilerReadBarrier) {
6110 if (kUseBakerReadBarrier) {
6111 // Load with fast path based Baker's read barrier.
6112 // /* HeapReference<Object> */ out = *(obj + offset)
6113 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006114 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006115 } else {
6116 // Load with slow path based read barrier.
6117 // /* HeapReference<Object> */ out = *(obj + offset)
6118 __ movl(out_reg, Address(obj_reg, offset));
6119 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6120 }
6121 } else {
6122 // Plain load with no read barrier.
6123 // /* HeapReference<Object> */ out = *(obj + offset)
6124 __ movl(out_reg, Address(obj_reg, offset));
6125 __ MaybeUnpoisonHeapReference(out_reg);
6126 }
6127}
6128
6129void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction,
6130 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006131 const Address& address,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006132 Label* fixup_label,
6133 bool requires_read_barrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006134 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006135 if (requires_read_barrier) {
6136 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006137 if (kUseBakerReadBarrier) {
6138 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6139 // Baker's read barrier are used:
6140 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006141 // root = *address;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006142 // if (Thread::Current()->GetIsGcMarking()) {
6143 // root = ReadBarrier::Mark(root)
6144 // }
6145
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006146 // /* GcRoot<mirror::Object> */ root = *address
6147 __ movl(root_reg, address);
6148 if (fixup_label != nullptr) {
6149 __ Bind(fixup_label);
6150 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006151 static_assert(
6152 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6153 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6154 "have different sizes.");
6155 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6156 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6157 "have different sizes.");
6158
Vladimir Marko953437b2016-08-24 08:30:46 +00006159 // Slow path marking the GC root `root`.
6160 SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
6161 instruction, root, /* unpoison */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006162 codegen_->AddSlowPath(slow_path);
6163
Andreas Gampe542451c2016-07-26 09:02:02 -07006164 __ gs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86_64PointerSize>().Int32Value(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006165 /* no_rip */ true),
6166 Immediate(0));
6167 __ j(kNotEqual, slow_path->GetEntryLabel());
6168 __ Bind(slow_path->GetExitLabel());
6169 } else {
6170 // GC root loaded through a slow path for read barriers other
6171 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006172 // /* GcRoot<mirror::Object>* */ root = address
6173 __ leaq(root_reg, address);
6174 if (fixup_label != nullptr) {
6175 __ Bind(fixup_label);
6176 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006177 // /* mirror::Object* */ root = root->Read()
6178 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6179 }
6180 } else {
6181 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006182 // /* GcRoot<mirror::Object> */ root = *address
6183 __ movl(root_reg, address);
6184 if (fixup_label != nullptr) {
6185 __ Bind(fixup_label);
6186 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006187 // Note that GC roots are not affected by heap poisoning, thus we
6188 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006189 }
6190}
6191
6192void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6193 Location ref,
6194 CpuRegister obj,
6195 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006196 bool needs_null_check) {
6197 DCHECK(kEmitCompilerReadBarrier);
6198 DCHECK(kUseBakerReadBarrier);
6199
6200 // /* HeapReference<Object> */ ref = *(obj + offset)
6201 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006202 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006203}
6204
6205void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6206 Location ref,
6207 CpuRegister obj,
6208 uint32_t data_offset,
6209 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006210 bool needs_null_check) {
6211 DCHECK(kEmitCompilerReadBarrier);
6212 DCHECK(kUseBakerReadBarrier);
6213
Roland Levillain3d312422016-06-23 13:53:42 +01006214 static_assert(
6215 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6216 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006217 // /* HeapReference<Object> */ ref =
6218 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006219 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006220 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006221}
6222
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.
  //
  // NOTE: the instruction order below is load-bearing; in particular the
  // condition flags set by the `testb` must survive untouched until the
  // conditional jump to the slow path.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  // Byte and bit of the lock word holding the read barrier state.
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above dereferences `obj`, so it doubles as the implicit
    // null check recorded here.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86_64(
      instruction, ref, /* unpoison */ true);
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
6296
6297void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
6298 Location out,
6299 Location ref,
6300 Location obj,
6301 uint32_t offset,
6302 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006303 DCHECK(kEmitCompilerReadBarrier);
6304
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006305 // Insert a slow path based read barrier *after* the reference load.
6306 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006307 // If heap poisoning is enabled, the unpoisoning of the loaded
6308 // reference will be carried out by the runtime within the slow
6309 // path.
6310 //
6311 // Note that `ref` currently does not get unpoisoned (when heap
6312 // poisoning is enabled), which is alright as the `ref` argument is
6313 // not used by the artReadBarrierSlow entry point.
6314 //
6315 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6316 SlowPathCode* slow_path = new (GetGraph()->GetArena())
6317 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
6318 AddSlowPath(slow_path);
6319
Roland Levillain0d5a2812015-11-13 10:07:31 +00006320 __ jmp(slow_path->GetEntryLabel());
6321 __ Bind(slow_path->GetExitLabel());
6322}
6323
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006324void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6325 Location out,
6326 Location ref,
6327 Location obj,
6328 uint32_t offset,
6329 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006330 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006331 // Baker's read barriers shall be handled by the fast path
6332 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
6333 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006334 // If heap poisoning is enabled, unpoisoning will be taken care of
6335 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006336 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006337 } else if (kPoisonHeapReferences) {
6338 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
6339 }
6340}
6341
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006342void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6343 Location out,
6344 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006345 DCHECK(kEmitCompilerReadBarrier);
6346
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006347 // Insert a slow path based read barrier *after* the GC root load.
6348 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00006349 // Note that GC roots are not affected by heap poisoning, so we do
6350 // not need to do anything special for this here.
6351 SlowPathCode* slow_path =
6352 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
6353 AddSlowPath(slow_path);
6354
Roland Levillain0d5a2812015-11-13 10:07:31 +00006355 __ jmp(slow_path->GetEntryLabel());
6356 __ Bind(slow_path->GetExitLabel());
6357}
6358
// HBoundType only carries type information for the compiler; it is removed
// during prepare-for-register-allocation and must never reach code generation.
void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6368
Mark Mendellfe57faa2015-09-18 09:26:15 -04006369// Simple implementation of packed switch - generate cascaded compare/jumps.
6370void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6371 LocationSummary* locations =
6372 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
6373 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04006374 locations->AddTemp(Location::RequiresRegister());
6375 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04006376}
6377
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  // Emits a packed switch either as a cascade of compare/branch pairs (small
  // switches) or as an indirect jump through a table of 32-bit offsets kept
  // in the constant area (large switches).
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Biased switch: anything below the first case value goes to the
      // default block; equality hits the first case directly.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      // Zero bias: an unsigned below (kBelow) test inside the loop also
      // catches negative inputs, so no separate range check is emitted here.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each iteration compares against case_value[index + 1] and settles two
    // cases at once (smaller values were excluded by earlier compares).
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      // The sum is evaluated in unsigned arithmetic (index is uint32_t) and
      // cast back, avoiding signed-overflow UB near INT32_MAX.
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table strategy from here on.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  // Unsigned compare: a negative biased value wraps above num_entries - 1,
  // so a single kAbove branch covers both out-of-range directions.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
6458
Aart Bikc5d47542016-01-27 17:00:35 -08006459void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
6460 if (value == 0) {
6461 __ xorl(dest, dest);
6462 } else {
6463 __ movl(dest, Immediate(value));
6464 }
6465}
6466
Mark Mendell92e83bf2015-05-07 11:25:03 -04006467void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
6468 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08006469 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006470 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00006471 } else if (IsUint<32>(value)) {
6472 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04006473 __ movl(dest, Immediate(static_cast<int32_t>(value)));
6474 } else {
6475 __ movq(dest, Immediate(value));
6476 }
6477}
6478
Mark Mendell7c0b44f2016-02-01 10:08:35 -05006479void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
6480 if (value == 0) {
6481 __ xorps(dest, dest);
6482 } else {
6483 __ movss(dest, LiteralInt32Address(value));
6484 }
6485}
6486
6487void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
6488 if (value == 0) {
6489 __ xorpd(dest, dest);
6490 } else {
6491 __ movsd(dest, LiteralInt64Address(value));
6492 }
6493}
6494
6495void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
6496 Load32BitValue(dest, bit_cast<int32_t, float>(value));
6497}
6498
6499void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
6500 Load64BitValue(dest, bit_cast<int64_t, double>(value));
6501}
6502
Aart Bika19616e2016-02-01 18:57:58 -08006503void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
6504 if (value == 0) {
6505 __ testl(dest, dest);
6506 } else {
6507 __ cmpl(dest, Immediate(value));
6508 }
6509}
6510
6511void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
6512 if (IsInt<32>(value)) {
6513 if (value == 0) {
6514 __ testq(dest, dest);
6515 } else {
6516 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
6517 }
6518 } else {
6519 // Value won't fit in an int.
6520 __ cmpq(dest, LiteralInt64Address(value));
6521 }
6522}
6523
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006524void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
6525 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6526 if (rhs.IsConstant()) {
6527 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
6528 Compare32BitValue(lhs_reg, value);
6529 } else if (rhs.IsStackSlot()) {
6530 __ cmpl(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6531 } else {
6532 __ cmpl(lhs_reg, rhs.AsRegister<CpuRegister>());
6533 }
6534}
6535
6536void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
6537 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
6538 if (rhs.IsConstant()) {
6539 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
6540 Compare64BitValue(lhs_reg, value);
6541 } else if (rhs.IsDoubleStackSlot()) {
6542 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
6543 } else {
6544 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
6545 }
6546}
6547
6548Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
6549 Location index,
6550 ScaleFactor scale,
6551 uint32_t data_offset) {
6552 return index.IsConstant() ?
6553 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
6554 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
6555}
6556
Mark Mendellcfa410b2015-05-25 16:02:44 -04006557void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
6558 DCHECK(dest.IsDoubleStackSlot());
6559 if (IsInt<32>(value)) {
6560 // Can move directly as an int32 constant.
6561 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
6562 Immediate(static_cast<int32_t>(value)));
6563 } else {
6564 Load64BitValue(CpuRegister(TMP), value);
6565 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
6566 }
6567}
6568
Mark Mendell9c86b482015-09-18 13:36:07 -04006569/**
6570 * Class to handle late fixup of offsets into constant area.
6571 */
6572class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
6573 public:
6574 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
6575 : codegen_(&codegen), offset_into_constant_area_(offset) {}
6576
6577 protected:
6578 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
6579
6580 CodeGeneratorX86_64* codegen_;
6581
6582 private:
6583 void Process(const MemoryRegion& region, int pos) OVERRIDE {
6584 // Patch the correct offset for the instruction. We use the address of the
6585 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
6586 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
6587 int32_t relative_position = constant_offset - pos;
6588
6589 // Patch in the right value.
6590 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
6591 }
6592
6593 // Location in constant area that the fixup refers to.
6594 size_t offset_into_constant_area_;
6595};
6596
6597/**
6598 t * Class to handle late fixup of offsets to a jump table that will be created in the
6599 * constant area.
6600 */
6601class JumpTableRIPFixup : public RIPFixup {
6602 public:
6603 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
6604 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
6605
6606 void CreateJumpTable() {
6607 X86_64Assembler* assembler = codegen_->GetAssembler();
6608
6609 // Ensure that the reference to the jump table has the correct offset.
6610 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
6611 SetOffset(offset_in_constant_table);
6612
6613 // Compute the offset from the start of the function to this jump table.
6614 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
6615
6616 // Populate the jump table with the correct values for the jump table.
6617 int32_t num_entries = switch_instr_->GetNumEntries();
6618 HBasicBlock* block = switch_instr_->GetBlock();
6619 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
6620 // The value that we want is the target offset - the position of the table.
6621 for (int32_t i = 0; i < num_entries; i++) {
6622 HBasicBlock* b = successors[i];
6623 Label* l = codegen_->GetLabelOf(b);
6624 DCHECK(l->IsBound());
6625 int32_t offset_to_block = l->Position() - current_table_offset;
6626 assembler->AppendInt32(offset_to_block);
6627 }
6628 }
6629
6630 private:
6631 const HPackedSwitch* switch_instr_;
6632};
6633
Mark Mendellf55c3e02015-03-26 21:07:46 -04006634void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
6635 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04006636 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04006637 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
6638 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04006639 assembler->Align(4, 0);
6640 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04006641
6642 // Populate any jump tables.
6643 for (auto jump_table : fixups_to_jump_tables_) {
6644 jump_table->CreateJumpTable();
6645 }
6646
6647 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04006648 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04006649 }
6650
6651 // And finish up.
6652 CodeGenerator::Finalize(allocator);
6653}
6654
Mark Mendellf55c3e02015-03-26 21:07:46 -04006655Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
6656 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
6657 return Address::RIP(fixup);
6658}
6659
6660Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
6661 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
6662 return Address::RIP(fixup);
6663}
6664
6665Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
6666 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
6667 return Address::RIP(fixup);
6668}
6669
6670Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
6671 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
6672 return Address::RIP(fixup);
6673}
6674
Andreas Gampe85b62f22015-09-09 13:15:38 -07006675// TODO: trg as memory.
6676void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, Primitive::Type type) {
6677 if (!trg.IsValid()) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006678 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07006679 return;
6680 }
6681
6682 DCHECK_NE(type, Primitive::kPrimVoid);
6683
6684 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
6685 if (trg.Equals(return_loc)) {
6686 return;
6687 }
6688
6689 // Let the parallel move resolver take care of all of this.
6690 HParallelMove parallel_move(GetGraph()->GetArena());
6691 parallel_move.AddMove(return_loc, trg, type, nullptr);
6692 GetMoveResolver()->EmitNativeCode(&parallel_move);
6693}
6694
Mark Mendell9c86b482015-09-18 13:36:07 -04006695Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
6696 // Create a fixup to be used to create and address the jump table.
6697 JumpTableRIPFixup* table_fixup =
6698 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
6699
6700 // We have to populate the jump tables.
6701 fixups_to_jump_tables_.push_back(table_fixup);
6702 return Address::RIP(table_fixup);
6703}
6704
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  // Stores a 64-bit immediate to memory. movq only takes a sign-extended
  // 32-bit immediate, so wider values are written as two 32-bit halves
  // (addr_low then addr_high).
  // NOTE(review): the implicit null check is recorded right after the first
  // store in both paths — presumably because that is the instruction that
  // faults on a null base; confirm against MaybeRecordImplicitNullCheck's
  // contract.
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a register. Do it in pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
6722
Roland Levillain4d027112015-07-01 15:41:14 +01006723#undef __
6724
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01006725} // namespace x86_64
6726} // namespace art