blob: 05194b15d5fafc3e84b54300864b55b96a096f8e [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070025#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080026#include "intrinsics.h"
27#include "intrinsics_x86_64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010028#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070029#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070030#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070031#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010032#include "mirror/object_reference.h"
33#include "thread.h"
34#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010035#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "utils/x86_64/assembler_x86_64.h"
37#include "utils/x86_64/managed_register_x86_64.h"
38
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039namespace art {
40
// Forward declaration, needed by read barrier support code below.
template<class MirrorType>
class GcRoot;
43
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010044namespace x86_64 {
45
// Stack offset (relative to SP) at which the current ArtMethod* is spilled.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* argument on entry, per the managed calling convention.
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Callee-saved registers of the x86-64 managed ABI used by this code generator.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the C2 condition bit (bit 10) of the x87 FPU status word.
static constexpr int kC2ConditionMask = 0x400;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
// `__` expects a local named `codegen` to be in scope at each use site.
#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010061
Andreas Gampe85b62f22015-09-09 13:15:38 -070062class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010063 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000064 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010065
Alexandre Rames2ed20af2015-03-06 13:55:35 +000066 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000067 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010068 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000069 if (instruction_->CanThrowIntoCatchBlock()) {
70 // Live registers will be restored in the catch block if caught.
71 SaveLiveRegisters(codegen, instruction_->GetLocations());
72 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010073 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000074 instruction_,
75 instruction_->GetDexPc(),
76 this);
Roland Levillain888d0672015-11-23 18:53:50 +000077 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010078 }
79
Alexandre Rames8158f282015-08-07 10:26:17 +010080 bool IsFatal() const OVERRIDE { return true; }
81
Alexandre Rames9931f312015-06-19 14:47:01 +010082 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86_64"; }
83
Nicolas Geoffraye5038322014-07-04 09:41:32 +010084 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010085 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
86};
87
Andreas Gampe85b62f22015-09-09 13:15:38 -070088class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000089 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000090 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000091
Alexandre Rames2ed20af2015-03-06 13:55:35 +000092 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +000093 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +000094 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +010095 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +000096 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +000097 }
98
Alexandre Rames8158f282015-08-07 10:26:17 +010099 bool IsFatal() const OVERRIDE { return true; }
100
Alexandre Rames9931f312015-06-19 14:47:01 +0100101 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86_64"; }
102
Calin Juravled0d48522014-11-04 16:40:20 +0000103 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000104 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
105};
106
// Slow path for Int32/Int64 div/rem when the divisor is -1. x86 `idiv`
// raises #DE for kMinValue / -1, so this path computes the result directly:
// for division, the quotient by -1 is the negated dividend (neg wraps for
// kMinValue, matching Java semantics); for remainder, the result is 0.
// `reg` holds the dividend on entry and receives the result.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (type_ == DataType::Type::kInt32) {
      if (is_div_) {
        __ negl(cpu_reg_);
      } else {
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(DataType::Type::kInt64, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // 32-bit xor is intentional: it zero-extends into the full 64-bit
        // register and has a shorter encoding than the 64-bit form.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;  // Dividend in, result out.
  const DataType::Type type_;  // kInt32 or kInt64.
  const bool is_div_;          // True for div, false for rem.
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
140
Andreas Gampe85b62f22015-09-09 13:15:38 -0700141class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000142 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100143 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000144 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000145
Alexandre Rames2ed20af2015-03-06 13:55:35 +0000146 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bikb13c65b2017-03-21 20:14:07 -0700147 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000148 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000149 __ Bind(GetEntryLabel());
Aart Bik24b905f2017-04-06 09:59:06 -0700150 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
Serban Constantinescuba45db02016-07-12 22:53:02 +0100151 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000152 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bik24b905f2017-04-06 09:59:06 -0700153 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100154 if (successor_ == nullptr) {
155 __ jmp(GetReturnLabel());
156 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000157 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100158 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000159 }
160
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100161 Label* GetReturnLabel() {
162 DCHECK(successor_ == nullptr);
163 return &return_label_;
164 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000165
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100166 HBasicBlock* GetSuccessor() const {
167 return successor_;
168 }
169
Alexandre Rames9931f312015-06-19 14:47:01 +0100170 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86_64"; }
171
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000172 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100173 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000174 Label return_label_;
175
176 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
177};
178
// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException (or
// StringIndexOutOfBoundsException for String.charAt) with the offending
// index and the length as runtime-call arguments. Fatal: the throw never
// returns, so no exit jump is emitted.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
    : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // The length was never materialized in a register: load it from the
      // array object into one of the runtime-call argument registers.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // Compressed strings keep a flag in the low bit of the length word;
        // shift it out to obtain the character count.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both possible entrypoints share the (index, length) signature.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
238
// Slow path resolving (and optionally initializing) a class through the
// runtime. Used by both HLoadClass and HClinitCheck; `at` is the instruction
// this slow path belongs to and `do_clinit` selects the entrypoint
// (kQuickInitializeStaticStorage vs kQuickInitializeType).
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  LoadClassSlowPathX86_64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    __ movl(CpuRegister(RAX), Immediate(cls_->GetTypeIndex().index_));
    x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                  instruction_,
                                  dex_pc_,
                                  this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    Location out = locations->Out();
    // Move the class to the desired location. HClinitCheck has no output,
    // in which case `out` is invalid and RAX is simply discarded.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of the instruction (`at`) this slow path belongs to.
  const uint32_t dex_pc_;

  // Whether to initialize the class (run its static initializer), not just resolve it.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
293
Vladimir Markoaad75c62016-10-03 08:46:48 +0000294class LoadStringSlowPathX86_64 : public SlowPathCode {
295 public:
296 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
297
298 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
299 LocationSummary* locations = instruction_->GetLocations();
300 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
301
302 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
303 __ Bind(GetEntryLabel());
304 SaveLiveRegisters(codegen, locations);
305
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000306 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100307 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000308 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000309 x86_64_codegen->InvokeRuntime(kQuickResolveString,
310 instruction_,
311 instruction_->GetDexPc(),
312 this);
313 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
314 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
315 RestoreLiveRegisters(codegen, locations);
316
Vladimir Markoaad75c62016-10-03 08:46:48 +0000317 __ jmp(GetExitLabel());
318 }
319
320 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; }
321
322 private:
323 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
324};
325
// Slow path shared by HInstanceOf and HCheckCast. For instance-of it calls
// kQuickInstanceofNonTrivial and moves the result to the output; for
// check-cast it calls kQuickCheckInstanceOf, which throws on failure. When
// `is_fatal` is true (check-cast known to fail), no exit jump is emitted.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    // Instance-of must not keep its output live across the runtime call.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      // Registers are only needed again if execution can continue (non-fatal)
      // or if a catch block may observe them.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The boolean result comes back in RAX.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether this slow path never falls back to the fast path.
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
388
Andreas Gampe85b62f22015-09-09 13:15:38 -0700389class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700390 public:
Aart Bik42249c32016-01-07 15:33:50 -0800391 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000392 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700393
394 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000395 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700396 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100397 LocationSummary* locations = instruction_->GetLocations();
398 SaveLiveRegisters(codegen, locations);
399 InvokeRuntimeCallingConvention calling_convention;
400 x86_64_codegen->Load32BitValue(
401 CpuRegister(calling_convention.GetRegisterAt(0)),
402 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100403 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100404 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700405 }
406
Alexandre Rames9931f312015-06-19 14:47:01 +0100407 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86_64"; }
408
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700409 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700410 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
411};
412
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100413class ArraySetSlowPathX86_64 : public SlowPathCode {
414 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000415 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100416
417 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
418 LocationSummary* locations = instruction_->GetLocations();
419 __ Bind(GetEntryLabel());
420 SaveLiveRegisters(codegen, locations);
421
422 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100423 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100424 parallel_move.AddMove(
425 locations->InAt(0),
426 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100427 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100428 nullptr);
429 parallel_move.AddMove(
430 locations->InAt(1),
431 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100432 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100433 nullptr);
434 parallel_move.AddMove(
435 locations->InAt(2),
436 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100437 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100438 nullptr);
439 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
440
Roland Levillain0d5a2812015-11-13 10:07:31 +0000441 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100442 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000443 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100444 RestoreLiveRegisters(codegen, locations);
445 __ jmp(GetExitLabel());
446 }
447
448 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86_64"; }
449
450 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100451 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
452};
453
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100454// Slow path marking an object reference `ref` during a read
455// barrier. The field `obj.field` in the object `obj` holding this
456// reference does not get updated by this slow path after marking (see
457// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
458//
459// This means that after the execution of this slow path, `ref` will
460// always be up-to-date, but `obj.field` may not; i.e., after the
461// flip, `ref` will be a to-space reference, but `obj.field` will
462// probably still be a from-space reference (unless it gets updated by
463// another thread, or if another thread installed another object
464// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000465class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
466 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100467 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
468 Location ref,
469 bool unpoison_ref_before_marking)
470 : SlowPathCode(instruction),
471 ref_(ref),
472 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000473 DCHECK(kEmitCompilerReadBarrier);
474 }
475
476 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86_64"; }
477
  // Emits the slow-path code: optionally unpoison `ref_`, then call the
  // per-register ReadBarrierMarkRegX entrypoint which marks the reference
  // in place (same register used as input and output).
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
537
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100538// Slow path marking an object reference `ref` during a read barrier,
539// and if needed, atomically updating the field `obj.field` in the
540// object `obj` holding this reference after marking (contrary to
541// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
542// `obj.field`).
543//
544// This means that after the execution of this slow path, both `ref`
545// and `obj.field` will be up-to-date; i.e., after the flip, both will
546// hold the same to-space reference (unless another thread installed
547// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Returns the name identifying this slow path, e.g. for debug dumps.
  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  // Emits the slow-path code: mark `ref_` via the ReadBarrierMarkRegX
  // entrypoint and, if marking produced a different reference, CAS the
  // new reference into `*field_addr_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI, output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Holds the old (pre-marking) reference, used as the CAS expected value.
  const CpuRegister temp1_;
  // Used to preserve RAX across the CAS sequence.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
709
Roland Levillain0d5a2812015-11-13 10:07:31 +0000710// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  // Emits the slow-path code: compute the reference's address operands,
  // call the kQuickReadBarrierSlow entrypoint, and move its result to `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the real offset and store it in `index`.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  // Returns the name identifying this slow path, e.g. for debug dumps.
  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Picks a core caller-save register different from `ref_` and `obj_`;
  // used to preserve a callee-save index register before clobbering it.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location where the read barrier's result is stored.
  const Location out_;
  // The location of the reference load being instrumented.
  const Location ref_;
  // The location of the object holding the loaded reference.
  const Location obj_;
  // The offset of the reference field or element within `obj_`.
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
891
892// Slow path generating a read barrier for a GC root.
893class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
894 public:
895 ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +0000896 : SlowPathCode(instruction), out_(out), root_(root) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000897 DCHECK(kEmitCompilerReadBarrier);
898 }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000899
900 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
901 LocationSummary* locations = instruction_->GetLocations();
902 DCHECK(locations->CanCall());
903 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000904 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
905 << "Unexpected instruction in read barrier for GC root slow path: "
906 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000907
908 __ Bind(GetEntryLabel());
909 SaveLiveRegisters(codegen, locations);
910
911 InvokeRuntimeCallingConvention calling_convention;
912 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
913 x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100914 x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000915 instruction_,
916 instruction_->GetDexPc(),
917 this);
918 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
919 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
920
921 RestoreLiveRegisters(codegen, locations);
922 __ jmp(GetExitLabel());
923 }
924
925 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86_64"; }
926
927 private:
Roland Levillain0d5a2812015-11-13 10:07:31 +0000928 const Location out_;
929 const Location root_;
930
931 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
932};
933
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100934#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100935// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
936#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100937
Roland Levillain4fa13f62015-07-06 18:11:54 +0100938inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700939 switch (cond) {
940 case kCondEQ: return kEqual;
941 case kCondNE: return kNotEqual;
942 case kCondLT: return kLess;
943 case kCondLE: return kLessEqual;
944 case kCondGT: return kGreater;
945 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700946 case kCondB: return kBelow;
947 case kCondBE: return kBelowEqual;
948 case kCondA: return kAbove;
949 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700950 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100951 LOG(FATAL) << "Unreachable";
952 UNREACHABLE();
953}
954
Aart Bike9f37602015-10-09 11:15:55 -0700955// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100956inline Condition X86_64FPCondition(IfCondition cond) {
957 switch (cond) {
958 case kCondEQ: return kEqual;
959 case kCondNE: return kNotEqual;
960 case kCondLT: return kBelow;
961 case kCondLE: return kBelowEqual;
962 case kCondGT: return kAbove;
963 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700964 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800965 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100966 LOG(FATAL) << "Unreachable";
967 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700968}
969
// On x86-64 every dispatch kind requested by the compiler driver is
// supported as-is, so the desired dispatch info is returned unchanged.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  return desired_dispatch_info;
}
975
// Emits a static or direct call: first materialize the callee ArtMethod (or
// entrypoint) according to the invoke's method load kind, then emit the call
// according to its code pointer location, and record the PC info.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling itself: the current method is already in a register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().IsBootImage());
      // The dummy offset is patched by the linker (see RecordBootImageMethodPatch).
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry; offset patched by the linker.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
      RecordMethodBssEntryPatch(invoke);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
1035
// Emits a virtual call: load the receiver's class, fetch the target ArtMethod
// from the class's embedded vtable, and call its quick-compiled entrypoint.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1068
Vladimir Markob066d432018-01-03 13:14:37 +00001069void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
1070 boot_image_method_patches_.emplace_back(/* target_dex_file */ nullptr, boot_image_offset);
1071 __ Bind(&boot_image_method_patches_.back().label);
1072}
1073
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001074void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
1075 boot_image_method_patches_.emplace_back(
1076 invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001077 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001078}
1079
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001080void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
1081 method_bss_entry_patches_.emplace_back(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
1082 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001083}
1084
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001085void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1086 boot_image_type_patches_.emplace_back(
1087 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001088 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001089}
1090
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001091Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001092 type_bss_entry_patches_.emplace_back(
1093 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001094 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001095}
1096
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001097void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1098 boot_image_string_patches_.emplace_back(
1099 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1100 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001101}
1102
Vladimir Markoaad75c62016-10-03 08:46:48 +00001103Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
1104 DCHECK(!GetCompilerOptions().IsBootImage());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001105 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001106 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001107 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001108}
1109
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
// Subtracting this adjustment from a bound label's position yields the offset of that
// 32-bit immediate within the instruction stream.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1113
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001114template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00001115inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
1116 const ArenaDeque<PatchInfo<Label>>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001117 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00001118 for (const PatchInfo<Label>& info : infos) {
1119 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
1120 linker_patches->push_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001121 Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001122 }
1123}
1124
Vladimir Markob066d432018-01-03 13:14:37 +00001125linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
1126 const DexFile* target_dex_file,
1127 uint32_t pc_insn_offset,
1128 uint32_t boot_image_offset) {
1129 DCHECK(target_dex_file == nullptr); // Unused for DataBimgRelRoPatch(), should be null.
1130 return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
1131}
1132
// Flushes every patch queue accumulated during code generation into
// |linker_patches|. The reserve() size must account for all queues emitted
// below; the final DCHECK_EQ verifies that accounting.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Total patch count across all six queues (boot-image and .bss-entry kinds).
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    // Boot image: method/type/string references become direct relative patches.
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    // App compilation: boot-image method references go through the relative-RO
    // data section; type/string boot-image queues must not have been used.
    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // .bss-entry patches are emitted the same way in both configurations.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1164
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001165void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001166 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001167}
1168
1169void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001170 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001171}
1172
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001173size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1174 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1175 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001176}
1177
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001178size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1179 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1180 return kX86_64WordSize;
1181}
1182
1183size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001184 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001185 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001186 } else {
1187 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1188 }
1189 return GetFloatingPointSpillSlotSize();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001190}
1191
1192size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001193 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001194 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001195 } else {
1196 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1197 }
1198 return GetFloatingPointSpillSlotSize();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001199}
1200
// Emits a call to the quick runtime entrypoint |entrypoint| on behalf of
// |instruction| at |dex_pc|. Validates the call against slow-path rules, then
// records a stack map after the call when the entrypoint requires one.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // Entrypoints live at fixed offsets from the Thread object.
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1211
// Emits a runtime call that deliberately records no stack map. Only valid for
// entrypoints whitelisted by ValidateInvokeRuntimeWithoutRecordingPcInfo.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1218
// Emits the actual runtime call: an absolute (non-RIP-relative) call through
// the gs segment, which addresses the current Thread's entrypoint table.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip */ true));
}
1222
// x86-64 has no register pairs (64-bit values fit in a single register).
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator. The base-class register masks include
// the fake return-address register so the frame accounting matches Quick; all
// patch/fixup containers are arena-allocated from the graph's allocator.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const X86_64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    // Core callee-save mask, plus the fake return register.
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      isa_features_(isa_features),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Make the fake return register visible to the register allocator bookkeeping.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001259
// Constructs the instruction visitor that emits code for each HInstruction,
// sharing the assembler owned by |codegen|.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1265
David Brazdil58282f42016-01-14 12:45:10 +00001266void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001267 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001268 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001269
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001270 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001271 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001272}
1273
// Maps an x86-64 core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}

// Maps an x86-64 floating-point register to its DWARF register number.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1281
// Emits the method prologue: optional hotness bump, implicit stack-overflow
// probe, callee-save spills, frame allocation, current-method store and the
// should_deoptimize flag. CFI directives are interleaved so unwind info
// tracks every stack-pointer adjustment.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    // Bump the method's hotness counter; the method pointer is still in its
    // argument register at this point.
    __ addw(Address(CpuRegister(kMethodRegisterArgument),
                    ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
  }

  if (!skip_overflow_check) {
    // Implicit overflow check: touch the page below the reserved region; a
    // fault here is turned into a StackOverflowError by the fault handler.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push core callee-saves in reverse index order (mirrored by the pops in
  // GenerateFrameExit()).
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame (everything not covered by the pushes).
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill FP callee-saves into their dedicated slots (movsd: low 64 bits).
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
  }
}
1341
// Emits the method epilogue: restore FP callee-saves, release the frame,
// pop core callee-saves, and return. CFI state is snapshotted around the
// epilogue so code emitted after it keeps the in-frame unwind description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Restore FP callee-saves in forward order (prologue spilled in reverse).
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the frame portion allocated with subq in the prologue.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves (reverse of the prologue's push order).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  // Re-establish the mid-function CFI state for any code following the return.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1372
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001373void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1374 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001375}
1376
// Emits a move from |source| to |destination|, dispatching on the location
// kinds of both sides (register, FP register, 32-bit stack slot, 64-bit stack
// slot, constant). Stack-to-stack moves go through the TMP scratch register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;  // Nothing to do.
  }
  if (destination.IsRegister()) {
    // Destination is a core register.
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // XMM -> GPR transfer.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    // Destination is an XMM register.
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Materialize the constant's bit pattern directly into the XMM register.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // Destination is a 32-bit stack slot.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through the scratch register.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    // Destination is a 64-bit stack slot.
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: bounce through the scratch register.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1456
Calin Juravle175dc732015-08-25 15:42:32 +01001457void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1458 DCHECK(location.IsRegister());
1459 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1460}
1461
Calin Juravlee460d1d2015-09-29 04:52:17 +01001462void CodeGeneratorX86_64::MoveLocation(
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001463 Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001464 Move(dst, src);
1465}
1466
1467void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1468 if (location.IsRegister()) {
1469 locations->AddTemp(location);
1470 } else {
1471 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1472 }
1473}
1474
// Emits control flow for an unconditional edge from |got|'s block to
// |successor|: nothing for edges into the exit block after a throw, a suspend
// check (plus optional hotness bump) on loop back edges, and a jmp only when
// the successor is not the next block in emission order.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    // Only throwing code can precede an edge to the exit block; emit nothing.
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      // Reload the current ArtMethod from the frame and bump its hotness
      // counter on every loop back edge.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), 0));
      __ addw(Address(CpuRegister(TMP), ArtMethod::HotnessCountOffset().Int32Value()),
              Immediate(1));
    }
    // The suspend check also emits the branch to the loop header.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1502
// HGoto needs no locations: it consumes and produces no values.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Code generation for HGoto is shared with HTryBoundary via HandleGoto().
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1510
// HTryBoundary needs no locations: it consumes and produces no values.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

// A try boundary only emits code for its normal-flow edge; exceptional edges
// are handled by the runtime. An edge into the exit block emits nothing.
void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
1521
// HExit needs no locations: it consumes and produces no values.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// HExit emits no code; reaching it means the method has already returned
// or thrown.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1528
// Emits the conditional jumps following an FP compare (ucomiss/ucomisd).
// Those instructions report NaN operands as "unordered", so the NaN outcome
// must be dispatched first, before the ordinary condition jump.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  // Non-NaN path: jump on the x86-64 condition code for this comparison.
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1540
// Emits the comparison that sets the condition codes for |condition|, without
// emitting any jump. Integral types use cmp-style sequences; FP types use
// ucomiss/ucomisd with register, constant-pool or stack-slot operands.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      // All 32-bit-or-narrower integral types (and references) share the
      // 32-bit compare.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the constant area.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against the constant materialized in the constant area.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1594
1595template<class LabelType>
1596void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1597 LabelType* true_target_in,
1598 LabelType* false_target_in) {
1599 // Generated branching requires both targets to be explicit. If either of the
1600 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1601 LabelType fallthrough_target;
1602 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1603 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1604
1605 // Generate the comparison to set the CC.
1606 GenerateCompareTest(condition);
1607
1608 // Now generate the correct jump(s).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001609 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001610 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001611 case DataType::Type::kInt64: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001612 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1613 break;
1614 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001615 case DataType::Type::kFloat32: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001616 GenerateFPJumps(condition, true_target, false_target);
1617 break;
1618 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001619 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001620 GenerateFPJumps(condition, true_target, false_target);
1621 break;
1622 }
1623 default:
1624 LOG(FATAL) << "Unexpected condition type " << type;
1625 }
1626
David Brazdil0debae72015-11-12 18:37:00 +00001627 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001628 __ jmp(false_target);
1629 }
David Brazdil0debae72015-11-12 18:37:00 +00001630
1631 if (fallthrough_target.IsLinked()) {
1632 __ Bind(&fallthrough_target);
1633 }
Mark Mendellc4701932015-04-10 13:18:51 -04001634}
1635
David Brazdil0debae72015-11-12 18:37:00 +00001636static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1637 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1638 // are set only strictly before `branch`. We can't use the eflags on long
1639 // conditions if they are materialized due to the complex branching.
1640 return cond->IsCondition() &&
1641 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001642 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001643}
1644
// Emits the test of `instruction`'s condition input (at `condition_input_index`)
// and the corresponding branch(es). Either target may be null, meaning that
// successor is the fall-through block; when both are null nothing is emitted.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    // Emit an unconditional jump to the taken side (if it is not fall-through).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // EFLAGS still hold the result of the condition; branch on it directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    // Integer comparison folded into the branch.
    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1728
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001729void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001730 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00001731 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001732 locations->SetInAt(0, Location::Any());
1733 }
1734}
1735
1736void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001737 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1738 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1739 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1740 nullptr : codegen_->GetLabelOf(true_successor);
1741 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1742 nullptr : codegen_->GetLabelOf(false_successor);
1743 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001744}
1745
1746void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001747 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001748 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01001749 InvokeRuntimeCallingConvention calling_convention;
1750 RegisterSet caller_saves = RegisterSet::Empty();
1751 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1752 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00001753 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001754 locations->SetInAt(0, Location::Any());
1755 }
1756}
1757
1758void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001759 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001760 GenerateTestAndBranch<Label>(deoptimize,
1761 /* condition_input_index */ 0,
1762 slow_path->GetEntryLabel(),
1763 /* false_target */ nullptr);
1764}
1765
Mingyao Yang063fc772016-08-02 11:02:54 -07001766void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001767 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07001768 LocationSummary(flag, LocationSummary::kNoCall);
1769 locations->SetOut(Location::RequiresRegister());
1770}
1771
1772void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1773 __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
1774 Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1775}
1776
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001777static bool SelectCanUseCMOV(HSelect* select) {
1778 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001779 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001780 return false;
1781 }
1782
1783 // A FP condition doesn't generate the single CC that we need.
1784 HInstruction* condition = select->GetCondition();
1785 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001786 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001787 return false;
1788 }
1789
1790 // We can generate a CMOV for this Select.
1791 return true;
1792}
1793
David Brazdil74eb1b22015-12-14 11:44:01 +00001794void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001795 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001796 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001797 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001798 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001799 } else {
1800 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001801 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001802 if (select->InputAt(1)->IsConstant()) {
1803 locations->SetInAt(1, Location::RequiresRegister());
1804 } else {
1805 locations->SetInAt(1, Location::Any());
1806 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001807 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001808 locations->SetInAt(1, Location::Any());
1809 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001810 }
1811 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1812 locations->SetInAt(2, Location::RequiresRegister());
1813 }
1814 locations->SetOut(Location::SameAsFirstInput());
1815}
1816
// Emits HSelect either as a CMOV (integer types with an integer condition) or
// as a test-and-branch plus move fallback.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here: generate the compare and use its CC.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // The "true" value lives on the stack; cmov can read it from memory.
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the "true" value when the condition
    // is false (output already holds the "false" value).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index */ 2,
                                     /* true_target */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1873
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs or outputs; an (empty) location summary is still allocated.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
1877
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally emits no code.
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1881
void CodeGeneratorX86_64::GenerateNop() {
  // Emit a single nop instruction.
  __ nop();
}
1885
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001886void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001887 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001888 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001889 // Handle the long/FP comparisons made in instruction simplification.
1890 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001891 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001892 locations->SetInAt(0, Location::RequiresRegister());
1893 locations->SetInAt(1, Location::Any());
1894 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001895 case DataType::Type::kFloat32:
1896 case DataType::Type::kFloat64:
Mark Mendellc4701932015-04-10 13:18:51 -04001897 locations->SetInAt(0, Location::RequiresFpuRegister());
1898 locations->SetInAt(1, Location::Any());
1899 break;
1900 default:
1901 locations->SetInAt(0, Location::RequiresRegister());
1902 locations->SetInAt(1, Location::Any());
1903 break;
1904 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001905 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001906 locations->SetOut(Location::RequiresRegister());
1907 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001908}
1909
// Materializes the boolean result of `cond` (0 or 1) into its output register,
// unless the condition is emitted at its use site, in which case no code is
// generated here.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      // ucomiss compares the XMM lhs against a constant, stack slot or XMM rhs.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      // Same as kFloat32, with the double-width compare.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
1979
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  // Location setup for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
1983
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  // Code emission for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
1987
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  // Location setup for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
1991
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  // Code emission for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
1995
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  // Location setup for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
1999
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  // Code emission for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2003
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Location setup for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2007
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Code emission for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2011
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // Location setup for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2015
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // Code emission for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2019
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Location setup for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2023
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Code emission for every comparison kind is shared in HandleCondition.
  HandleCondition(comp);
}
2027
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  // Unsigned below (<): shared location setup in HandleCondition.
  HandleCondition(comp);
}
2031
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  // Unsigned below (<): shared code emission in HandleCondition.
  HandleCondition(comp);
}
2035
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned below-or-equal (<=): shared location setup in HandleCondition.
  HandleCondition(comp);
}
2039
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Unsigned below-or-equal (<=): shared code emission in HandleCondition.
  HandleCondition(comp);
}
2043
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  // Unsigned above (>): shared location setup in HandleCondition.
  HandleCondition(comp);
}
2047
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  // Unsigned above (>): shared code emission in HandleCondition.
  HandleCondition(comp);
}
2051
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned above-or-equal (>=): shared location setup in HandleCondition.
  HandleCondition(comp);
}
2055
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Unsigned above-or-equal (>=): shared code emission in HandleCondition.
  HandleCondition(comp);
}
2059
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002060void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002061 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002062 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00002063 switch (compare->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002064 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002065 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002066 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002067 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002068 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002069 case DataType::Type::kInt32:
2070 case DataType::Type::kInt64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00002071 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002072 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002073 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2074 break;
2075 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002076 case DataType::Type::kFloat32:
2077 case DataType::Type::kFloat64: {
Calin Juravleddb7df22014-11-25 20:56:51 +00002078 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04002079 locations->SetInAt(1, Location::Any());
Calin Juravleddb7df22014-11-25 20:56:51 +00002080 locations->SetOut(Location::RequiresRegister());
2081 break;
2082 }
2083 default:
2084 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2085 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01002086}
2087
// Emits HCompare: materializes -1, 0 or 1 into `out` depending on whether the
// left input is less than, equal to, or greater than the right input.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      // Compare the XMM lhs against a constant, stack slot or XMM rhs.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN (unordered) yields 1 for gt-bias compares, -1 otherwise.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      // Same as kFloat32, with the double-width compare.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Turn the flags/branches into -1/0/1 in `out`.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2157
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002158void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002159 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002160 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002161 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002162}
2163
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is encoded at each use site.
  // Will be generated at use site.
}
2167
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002168void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2169 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002170 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002171 locations->SetOut(Location::ConstantLocation(constant));
2172}
2173
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is encoded at each use site.
  // Will be generated at use site.
}
2177
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002178void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002179 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002180 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002181 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002182}
2183
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is encoded at each use site.
  // Will be generated at use site.
}
2187
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002188void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2189 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002190 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002191 locations->SetOut(Location::ConstantLocation(constant));
2192}
2193
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is encoded at each use site.
  // Will be generated at use site.
}
2197
2198void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2199 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002200 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002201 locations->SetOut(Location::ConstantLocation(constant));
2202}
2203
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is encoded at each use site.
  // Will be generated at use site.
}
2208
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // The fence uses no registers, so no location summary is needed.
  constructor_fence->SetLocations(nullptr);
}
2212
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is lowered to a store/store memory barrier.
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2217
Calin Juravle27df7582015-04-17 19:12:31 +01002218void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2219 memory_barrier->SetLocations(nullptr);
2220}
2221
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the fence matching the barrier kind carried by the HIR node.
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2225
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002226void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
2227 ret->SetLocations(nullptr);
2228}
2229
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002230void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002231 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002232}
2233
2234void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002235 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002236 new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002237 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002238 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002239 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002240 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002241 case DataType::Type::kInt8:
2242 case DataType::Type::kUint16:
2243 case DataType::Type::kInt16:
2244 case DataType::Type::kInt32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002245 case DataType::Type::kInt64:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002246 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002247 break;
2248
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002249 case DataType::Type::kFloat32:
2250 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002251 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002252 break;
2253
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002254 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002255 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002256 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002257}
2258
2259void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
2260 if (kIsDebugBuild) {
2261 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002262 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002263 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002264 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002265 case DataType::Type::kInt8:
2266 case DataType::Type::kUint16:
2267 case DataType::Type::kInt16:
2268 case DataType::Type::kInt32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002269 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002270 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002271 break;
2272
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002273 case DataType::Type::kFloat32:
2274 case DataType::Type::kFloat64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002275 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002276 XMM0);
2277 break;
2278
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002279 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002280 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002281 }
2282 }
2283 codegen_->GenerateFrameExit();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002284}
2285
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002286Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002287 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002288 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002289 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002290 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002291 case DataType::Type::kInt8:
2292 case DataType::Type::kUint16:
2293 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08002294 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002295 case DataType::Type::kInt32:
Aart Bik66c158e2018-01-31 12:55:04 -08002296 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002297 case DataType::Type::kInt64:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002298 return Location::RegisterLocation(RAX);
2299
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002300 case DataType::Type::kVoid:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002301 return Location::NoLocation();
2302
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002303 case DataType::Type::kFloat64:
2304 case DataType::Type::kFloat32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002305 return Location::FpuRegisterLocation(XMM0);
2306 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002307
2308 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002309}
2310
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  // The callee ArtMethod* is always passed in the fixed method register.
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2314
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002315Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002316 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002317 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002318 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002319 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002320 case DataType::Type::kInt8:
2321 case DataType::Type::kUint16:
2322 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002323 case DataType::Type::kInt32: {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002324 uint32_t index = gp_index_++;
2325 stack_index_++;
2326 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002327 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002328 } else {
2329 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2330 }
2331 }
2332
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002333 case DataType::Type::kInt64: {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002334 uint32_t index = gp_index_;
2335 stack_index_ += 2;
2336 if (index < calling_convention.GetNumberOfRegisters()) {
2337 gp_index_ += 1;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002338 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002339 } else {
2340 gp_index_ += 2;
2341 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2342 }
2343 }
2344
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002345 case DataType::Type::kFloat32: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002346 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002347 stack_index_++;
2348 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002349 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002350 } else {
2351 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
2352 }
2353 }
2354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002355 case DataType::Type::kFloat64: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002356 uint32_t index = float_index_++;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002357 stack_index_ += 2;
2358 if (index < calling_convention.GetNumberOfFpuRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002359 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002360 } else {
2361 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
2362 }
2363 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002364
Aart Bik66c158e2018-01-31 12:55:04 -08002365 case DataType::Type::kUint32:
2366 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002367 case DataType::Type::kVoid:
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002368 LOG(FATAL) << "Unexpected parameter type " << type;
2369 break;
2370 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00002371 return Location::NoLocation();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002372}
2373
Calin Juravle175dc732015-08-25 15:42:32 +01002374void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2375 // The trampoline uses the same calling convention as dex calling conventions,
2376 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
2377 // the method_idx.
2378 HandleInvoke(invoke);
2379}
2380
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Delegate to the shared helper that emits the unresolved-invoke runtime call.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2384
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002385void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002386 // Explicit clinit checks triggered by static invokes must have been pruned by
2387 // art::PrepareForRegisterAllocation.
2388 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002389
Mark Mendellfb8d2792015-03-31 22:16:59 -04002390 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002391 if (intrinsic.TryDispatch(invoke)) {
2392 return;
2393 }
2394
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002395 HandleInvoke(invoke);
2396}
2397
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002398static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2399 if (invoke->GetLocations()->Intrinsified()) {
2400 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2401 intrinsic.Dispatch(invoke);
2402 return true;
2403 }
2404 return false;
2405}
2406
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002407void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002408 // Explicit clinit checks triggered by static invokes must have been pruned by
2409 // art::PrepareForRegisterAllocation.
2410 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002411
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002412 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2413 return;
2414 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002415
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002416 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002417 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002418 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002419}
2420
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002421void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002422 InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002423 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002424}
2425
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002426void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002427 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002428 if (intrinsic.TryDispatch(invoke)) {
2429 return;
2430 }
2431
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002432 HandleInvoke(invoke);
2433}
2434
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002435void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002436 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2437 return;
2438 }
2439
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002440 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002441 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002442}
2443
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002444void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
2445 HandleInvoke(invoke);
2446 // Add the hidden argument.
2447 invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
2448}
2449
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument (the dex method index). It is safe to do so
  // here, as RAX won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // Load the receiver's class into `temp`, whether the receiver is spilled
  // on the stack or lives in a register.
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the offset of this method's IMT entry.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2495
Orion Hodsonac141392017-01-13 11:53:47 +00002496void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2497 HandleInvoke(invoke);
2498}
2499
void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the shared helper that emits the invoke-polymorphic call.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
2503
Orion Hodson4c8e12e2018-05-18 08:33:20 +01002504void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
2505 HandleInvoke(invoke);
2506}
2507
void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  // Delegate to the shared helper that emits the invoke-custom call.
  codegen_->GenerateInvokeCustomCall(invoke);
}
2511
Roland Levillain88cb1752014-10-20 16:36:47 +01002512void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2513 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002514 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002515 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002516 case DataType::Type::kInt32:
2517 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002518 locations->SetInAt(0, Location::RequiresRegister());
2519 locations->SetOut(Location::SameAsFirstInput());
2520 break;
2521
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002522 case DataType::Type::kFloat32:
2523 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002524 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002525 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002526 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002527 break;
2528
2529 default:
2530 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2531 }
2532}
2533
2534void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
2535 LocationSummary* locations = neg->GetLocations();
2536 Location out = locations->Out();
2537 Location in = locations->InAt(0);
2538 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002539 case DataType::Type::kInt32:
Roland Levillain88cb1752014-10-20 16:36:47 +01002540 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002541 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002542 __ negl(out.AsRegister<CpuRegister>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002543 break;
2544
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002545 case DataType::Type::kInt64:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002546 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002547 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002548 __ negq(out.AsRegister<CpuRegister>());
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002549 break;
2550
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002551 case DataType::Type::kFloat32: {
Roland Levillain5368c212014-11-27 15:03:41 +00002552 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002553 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002554 // Implement float negation with an exclusive or with value
2555 // 0x80000000 (mask for bit 31, representing the sign of a
2556 // single-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002557 __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002558 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002559 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002560 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002561
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002562 case DataType::Type::kFloat64: {
Roland Levillain5368c212014-11-27 15:03:41 +00002563 DCHECK(in.Equals(out));
Mark Mendell40741f32015-04-20 22:10:34 -04002564 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002565 // Implement double negation with an exclusive or with value
Roland Levillain3dbcb382014-10-28 17:30:07 +00002566 // 0x8000000000000000 (mask for bit 63, representing the sign of
Roland Levillain5368c212014-11-27 15:03:41 +00002567 // a double-precision floating-point number).
Mark Mendell40741f32015-04-20 22:10:34 -04002568 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002569 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002570 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002571 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002572
2573 default:
2574 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2575 }
2576}
2577
Roland Levillaindff1f282014-11-05 14:15:05 +00002578void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2579 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002580 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002581 DataType::Type result_type = conversion->GetResultType();
2582 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002583 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2584 << input_type << " -> " << result_type;
David Brazdil46e2a392015-03-16 17:31:52 +00002585
Roland Levillaindff1f282014-11-05 14:15:05 +00002586 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002587 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002588 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002589 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002590 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002591 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2592 locations->SetInAt(0, Location::Any());
2593 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002594 break;
2595
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002596 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002597 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002598 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002599 locations->SetInAt(0, Location::Any());
2600 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2601 break;
2602
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002603 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002604 locations->SetInAt(0, Location::RequiresFpuRegister());
2605 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002606 break;
2607
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002608 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002609 locations->SetInAt(0, Location::RequiresFpuRegister());
2610 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002611 break;
2612
2613 default:
2614 LOG(FATAL) << "Unexpected type conversion from " << input_type
2615 << " to " << result_type;
2616 }
2617 break;
2618
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002619 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002620 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002621 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002622 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002623 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002624 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002625 case DataType::Type::kInt16:
2626 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002627 // TODO: We would benefit from a (to-be-implemented)
2628 // Location::RegisterOrStackSlot requirement for this input.
2629 locations->SetInAt(0, Location::RequiresRegister());
2630 locations->SetOut(Location::RequiresRegister());
2631 break;
2632
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002633 case DataType::Type::kFloat32:
Roland Levillain624279f2014-12-04 11:54:28 +00002634 locations->SetInAt(0, Location::RequiresFpuRegister());
2635 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002636 break;
2637
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002638 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002639 locations->SetInAt(0, Location::RequiresFpuRegister());
2640 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002641 break;
2642
2643 default:
2644 LOG(FATAL) << "Unexpected type conversion from " << input_type
2645 << " to " << result_type;
2646 }
2647 break;
2648
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002649 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002650 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002651 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002652 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002653 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002654 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002655 case DataType::Type::kInt16:
2656 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002657 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002658 locations->SetOut(Location::RequiresFpuRegister());
2659 break;
2660
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002661 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002662 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002663 locations->SetOut(Location::RequiresFpuRegister());
2664 break;
2665
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002666 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002667 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002668 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002669 break;
2670
2671 default:
2672 LOG(FATAL) << "Unexpected type conversion from " << input_type
2673 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002674 }
Roland Levillaincff13742014-11-17 14:32:17 +00002675 break;
2676
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002677 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002678 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002679 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002680 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002681 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002682 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002683 case DataType::Type::kInt16:
2684 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002685 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002686 locations->SetOut(Location::RequiresFpuRegister());
2687 break;
2688
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002689 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002690 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002691 locations->SetOut(Location::RequiresFpuRegister());
2692 break;
2693
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002694 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04002695 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002696 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002697 break;
2698
2699 default:
2700 LOG(FATAL) << "Unexpected type conversion from " << input_type
2701 << " to " << result_type;
2702 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002703 break;
2704
2705 default:
2706 LOG(FATAL) << "Unexpected type conversion from " << input_type
2707 << " to " << result_type;
2708 }
2709}
2710
2711void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2712 LocationSummary* locations = conversion->GetLocations();
2713 Location out = locations->Out();
2714 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002715 DataType::Type result_type = conversion->GetResultType();
2716 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002717 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2718 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002719 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002720 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002721 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002722 case DataType::Type::kInt8:
2723 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002724 case DataType::Type::kInt16:
2725 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002726 case DataType::Type::kInt64:
2727 if (in.IsRegister()) {
2728 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2729 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2730 __ movzxb(out.AsRegister<CpuRegister>(),
2731 Address(CpuRegister(RSP), in.GetStackIndex()));
2732 } else {
2733 __ movl(out.AsRegister<CpuRegister>(),
2734 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2735 }
2736 break;
2737
2738 default:
2739 LOG(FATAL) << "Unexpected type conversion from " << input_type
2740 << " to " << result_type;
2741 }
2742 break;
2743
2744 case DataType::Type::kInt8:
2745 switch (input_type) {
2746 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002747 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002748 case DataType::Type::kInt16:
2749 case DataType::Type::kInt32:
2750 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002751 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002752 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002753 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002754 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002755 Address(CpuRegister(RSP), in.GetStackIndex()));
2756 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002757 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002758 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002759 }
2760 break;
2761
2762 default:
2763 LOG(FATAL) << "Unexpected type conversion from " << input_type
2764 << " to " << result_type;
2765 }
2766 break;
2767
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002768 case DataType::Type::kUint16:
2769 switch (input_type) {
2770 case DataType::Type::kInt8:
2771 case DataType::Type::kInt16:
2772 case DataType::Type::kInt32:
2773 case DataType::Type::kInt64:
2774 if (in.IsRegister()) {
2775 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2776 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2777 __ movzxw(out.AsRegister<CpuRegister>(),
2778 Address(CpuRegister(RSP), in.GetStackIndex()));
2779 } else {
2780 __ movl(out.AsRegister<CpuRegister>(),
2781 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2782 }
2783 break;
2784
2785 default:
2786 LOG(FATAL) << "Unexpected type conversion from " << input_type
2787 << " to " << result_type;
2788 }
2789 break;
2790
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002791 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002792 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002793 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002794 case DataType::Type::kInt32:
2795 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002796 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002797 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002798 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002799 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002800 Address(CpuRegister(RSP), in.GetStackIndex()));
2801 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002802 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002803 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002804 }
2805 break;
2806
2807 default:
2808 LOG(FATAL) << "Unexpected type conversion from " << input_type
2809 << " to " << result_type;
2810 }
2811 break;
2812
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002813 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002814 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002815 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002816 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002817 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002818 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002819 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002820 Address(CpuRegister(RSP), in.GetStackIndex()));
2821 } else {
2822 DCHECK(in.IsConstant());
2823 DCHECK(in.GetConstant()->IsLongConstant());
2824 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002825 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002826 }
2827 break;
2828
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002829 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002830 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2831 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002832 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002833
2834 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002835 // if input >= (float)INT_MAX goto done
2836 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002837 __ j(kAboveEqual, &done);
2838 // if input == NaN goto nan
2839 __ j(kUnordered, &nan);
2840 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002841 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002842 __ jmp(&done);
2843 __ Bind(&nan);
2844 // output = 0
2845 __ xorl(output, output);
2846 __ Bind(&done);
2847 break;
2848 }
2849
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002850 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002851 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2852 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002853 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002854
2855 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002856 // if input >= (double)INT_MAX goto done
2857 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002858 __ j(kAboveEqual, &done);
2859 // if input == NaN goto nan
2860 __ j(kUnordered, &nan);
2861 // output = double-to-int-truncate(input)
2862 __ cvttsd2si(output, input);
2863 __ jmp(&done);
2864 __ Bind(&nan);
2865 // output = 0
2866 __ xorl(output, output);
2867 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002868 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002869 }
Roland Levillain946e1432014-11-11 17:35:19 +00002870
2871 default:
2872 LOG(FATAL) << "Unexpected type conversion from " << input_type
2873 << " to " << result_type;
2874 }
2875 break;
2876
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002877 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002878 switch (input_type) {
2879 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002880 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002881 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002882 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002883 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002884 case DataType::Type::kInt16:
2885 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002886 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002887 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002888 break;
2889
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002890 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00002891 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2892 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002893 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002894
Mark Mendell92e83bf2015-05-07 11:25:03 -04002895 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002896 // if input >= (float)LONG_MAX goto done
2897 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002898 __ j(kAboveEqual, &done);
2899 // if input == NaN goto nan
2900 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002901 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002902 __ cvttss2si(output, input, true);
2903 __ jmp(&done);
2904 __ Bind(&nan);
2905 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002906 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002907 __ Bind(&done);
2908 break;
2909 }
2910
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002911 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002912 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2913 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002914 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002915
Mark Mendell92e83bf2015-05-07 11:25:03 -04002916 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002917 // if input >= (double)LONG_MAX goto done
2918 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002919 __ j(kAboveEqual, &done);
2920 // if input == NaN goto nan
2921 __ j(kUnordered, &nan);
2922 // output = double-to-long-truncate(input)
2923 __ cvttsd2si(output, input, true);
2924 __ jmp(&done);
2925 __ Bind(&nan);
2926 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002927 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002928 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002929 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002930 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002931
2932 default:
2933 LOG(FATAL) << "Unexpected type conversion from " << input_type
2934 << " to " << result_type;
2935 }
2936 break;
2937
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002938 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002939 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002940 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002941 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002942 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002943 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002944 case DataType::Type::kInt16:
2945 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002946 if (in.IsRegister()) {
2947 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
2948 } else if (in.IsConstant()) {
2949 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
2950 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002951 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002952 } else {
2953 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2954 Address(CpuRegister(RSP), in.GetStackIndex()), false);
2955 }
Roland Levillaincff13742014-11-17 14:32:17 +00002956 break;
2957
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002958 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002959 if (in.IsRegister()) {
2960 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
2961 } else if (in.IsConstant()) {
2962 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
2963 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06002964 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002965 } else {
2966 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
2967 Address(CpuRegister(RSP), in.GetStackIndex()), true);
2968 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002969 break;
2970
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002971 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002972 if (in.IsFpuRegister()) {
2973 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
2974 } else if (in.IsConstant()) {
2975 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
2976 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05002977 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04002978 } else {
2979 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
2980 Address(CpuRegister(RSP), in.GetStackIndex()));
2981 }
Roland Levillaincff13742014-11-17 14:32:17 +00002982 break;
2983
2984 default:
2985 LOG(FATAL) << "Unexpected type conversion from " << input_type
2986 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002987 }
Roland Levillaincff13742014-11-17 14:32:17 +00002988 break;
2989
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002990 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002991 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002992 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002993 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002994 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002995 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002996 case DataType::Type::kInt16:
2997 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002998 if (in.IsRegister()) {
2999 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3000 } else if (in.IsConstant()) {
3001 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3002 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003003 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003004 } else {
3005 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3006 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3007 }
Roland Levillaincff13742014-11-17 14:32:17 +00003008 break;
3009
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003010 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003011 if (in.IsRegister()) {
3012 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3013 } else if (in.IsConstant()) {
3014 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3015 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003016 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003017 } else {
3018 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3019 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3020 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003021 break;
3022
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003023 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003024 if (in.IsFpuRegister()) {
3025 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3026 } else if (in.IsConstant()) {
3027 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3028 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003029 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003030 } else {
3031 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3032 Address(CpuRegister(RSP), in.GetStackIndex()));
3033 }
Roland Levillaincff13742014-11-17 14:32:17 +00003034 break;
3035
3036 default:
3037 LOG(FATAL) << "Unexpected type conversion from " << input_type
3038 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003039 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003040 break;
3041
3042 default:
3043 LOG(FATAL) << "Unexpected type conversion from " << input_type
3044 << " to " << result_type;
3045 }
3046}
3047
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003048void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003049 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003050 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003051 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003052 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003053 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003054 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3055 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003056 break;
3057 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003058
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003059 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003060 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003061 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003062 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003063 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003064 break;
3065 }
3066
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003067 case DataType::Type::kFloat64:
3068 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003069 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003070 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003071 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003072 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003073 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003074
3075 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003076 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003077 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003078}
3079
// Emits x86-64 code for HAdd. For integer adds the output register may be
// distinct from both inputs (the locations builder requested
// kNoOutputOverlap), so the non-destructive leal/leaq form is used whenever
// the output does not already alias an operand.
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out aliases first: plain destructive add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out aliases second: add is commutative, add first into it.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Three distinct registers: use leal as a 3-operand add.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // out != first: leal computes first + constant without clobbering first.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Stack operand: only the destructive reg += mem form exists.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Three distinct registers: leaq as a 3-operand 64-bit add.
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        // The locations builder only allowed int32-encodable constants here.
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // addss overwrites first; out aliases first (SameAsFirstInput).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is loaded from the literal pool.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      // Same as kFloat32, with the double-precision addsd.
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3171
3172void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003173 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003174 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003175 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003176 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003177 locations->SetInAt(0, Location::RequiresRegister());
3178 locations->SetInAt(1, Location::Any());
3179 locations->SetOut(Location::SameAsFirstInput());
3180 break;
3181 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003182 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003183 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003184 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003185 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003186 break;
3187 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003188 case DataType::Type::kFloat32:
3189 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003190 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003191 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003192 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003193 break;
Calin Juravle11351682014-10-23 15:38:15 +01003194 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003195 default:
Calin Juravle11351682014-10-23 15:38:15 +01003196 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003197 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003198}
3199
3200void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3201 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003202 Location first = locations->InAt(0);
3203 Location second = locations->InAt(1);
3204 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003205 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003206 case DataType::Type::kInt32: {
Calin Juravle11351682014-10-23 15:38:15 +01003207 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003208 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003209 } else if (second.IsConstant()) {
3210 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003211 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003212 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003213 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003214 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003215 break;
3216 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003217 case DataType::Type::kInt64: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003218 if (second.IsConstant()) {
3219 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3220 DCHECK(IsInt<32>(value));
3221 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3222 } else {
3223 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3224 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003225 break;
3226 }
3227
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003228 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003229 if (second.IsFpuRegister()) {
3230 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3231 } else if (second.IsConstant()) {
3232 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003233 codegen_->LiteralFloatAddress(
3234 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003235 } else {
3236 DCHECK(second.IsStackSlot());
3237 __ subss(first.AsFpuRegister<XmmRegister>(),
3238 Address(CpuRegister(RSP), second.GetStackIndex()));
3239 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003240 break;
Calin Juravle11351682014-10-23 15:38:15 +01003241 }
3242
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003243 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003244 if (second.IsFpuRegister()) {
3245 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3246 } else if (second.IsConstant()) {
3247 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003248 codegen_->LiteralDoubleAddress(
3249 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003250 } else {
3251 DCHECK(second.IsDoubleStackSlot());
3252 __ subsd(first.AsFpuRegister<XmmRegister>(),
3253 Address(CpuRegister(RSP), second.GetStackIndex()));
3254 }
Calin Juravle11351682014-10-23 15:38:15 +01003255 break;
3256 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003257
3258 default:
Calin Juravle11351682014-10-23 15:38:15 +01003259 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003260 }
3261}
3262
Calin Juravle34bacdf2014-10-07 20:23:36 +01003263void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3264 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003265 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003266 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003267 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003268 locations->SetInAt(0, Location::RequiresRegister());
3269 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003270 if (mul->InputAt(1)->IsIntConstant()) {
3271 // Can use 3 operand multiply.
3272 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3273 } else {
3274 locations->SetOut(Location::SameAsFirstInput());
3275 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003276 break;
3277 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003278 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003279 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003280 locations->SetInAt(1, Location::Any());
3281 if (mul->InputAt(1)->IsLongConstant() &&
3282 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003283 // Can use 3 operand multiply.
3284 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3285 } else {
3286 locations->SetOut(Location::SameAsFirstInput());
3287 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003288 break;
3289 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003290 case DataType::Type::kFloat32:
3291 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003292 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003293 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003294 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003295 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003296 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003297
3298 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003299 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003300 }
3301}
3302
// Emits x86-64 code for an HMul. Integer multiplies use the three-operand
// imul form when the right-hand side is a constant (so the output register
// may differ from the first input); otherwise the two-operand form requires
// out == first, which the locations builder arranged. FP multiplies always
// have out == first and take the second operand from a register, a constant
// pool literal, or a stack slot.
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        // imul reg, reg/mem, imm — does not require out == first.
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // 64-bit imul only accepts a 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand comes from the RIP-relative literal pool.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3386
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003387void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3388 uint32_t stack_adjustment, bool is_float) {
3389 if (source.IsStackSlot()) {
3390 DCHECK(is_float);
3391 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3392 } else if (source.IsDoubleStackSlot()) {
3393 DCHECK(!is_float);
3394 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3395 } else {
3396 // Write the value to the temporary location on the stack and load to FP stack.
3397 if (is_float) {
3398 Location stack_temp = Location::StackSlot(temp_offset);
3399 codegen_->Move(stack_temp, source);
3400 __ flds(Address(CpuRegister(RSP), temp_offset));
3401 } else {
3402 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3403 codegen_->Move(stack_temp, source);
3404 __ fldl(Address(CpuRegister(RSP), temp_offset));
3405 }
3406 }
3407}
3408
// Emits a floating-point remainder using the x87 FPREM instruction, which is
// the only x86 primitive matching Java's drem/frem semantics. FPREM performs
// partial argument reduction, so it must be iterated until the FPU status
// C2 flag clears. Values travel SSE -> stack -> x87 and back because the
// rest of the codegen keeps FP values in XMM registers.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // FPREM computes ST(0) % ST(1), so the divisor goes on first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3461
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003462void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3463 DCHECK(instruction->IsDiv() || instruction->IsRem());
3464
3465 LocationSummary* locations = instruction->GetLocations();
3466 Location second = locations->InAt(1);
3467 DCHECK(second.IsConstant());
3468
3469 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3470 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003471 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003472
3473 DCHECK(imm == 1 || imm == -1);
3474
3475 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003476 case DataType::Type::kInt32: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003477 if (instruction->IsRem()) {
3478 __ xorl(output_register, output_register);
3479 } else {
3480 __ movl(output_register, input_register);
3481 if (imm == -1) {
3482 __ negl(output_register);
3483 }
3484 }
3485 break;
3486 }
3487
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003488 case DataType::Type::kInt64: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003489 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003490 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003491 } else {
3492 __ movq(output_register, input_register);
3493 if (imm == -1) {
3494 __ negq(output_register);
3495 }
3496 }
3497 break;
3498 }
3499
3500 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003501 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003502 }
3503}
3504
// Emits code for a signed HDiv by a power-of-two constant (or by the type's
// minimum value, which AbsOrMin also maps to a power of two). Uses the
// standard bias-then-shift sequence: negative dividends are biased by
// (abs_imm - 1) before the arithmetic right shift so the result rounds
// toward zero like idiv, then the result is negated for negative divisors.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // tmp = numerator + (abs_imm - 1), the biased value for negative inputs.
    __ leal(tmp, Address(numerator, abs_imm - 1));
    __ testl(numerator, numerator);
    // Non-negative dividends need no bias: keep the original value.
    __ cmov(kGreaterEqual, tmp, numerator);
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();

    // The 64-bit bias may not fit an addressing-mode displacement, so
    // materialize it in a register and add.
    codegen_->Load64BitValue(rdx, abs_imm - 1);
    __ addq(rdx, numerator);
    __ testq(numerator, numerator);
    __ cmov(kGreaterEqual, rdx, numerator);
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3548
// Emits code for a signed HDiv/HRem by an arbitrary non-trivial constant,
// replacing idiv with a multiply by a precomputed "magic" reciprocal plus
// correction steps (the magic/shift pair comes from
// CalculateMagicAndShiftForDivRem). The location builder pinned the dividend
// to RAX and reserved RDX, since imul with one operand writes RDX:RAX.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Temp register that preserves the original dividend across the clobbering
  // multiply; its index depends on whether RDX is a temp (div) or the output (rem).
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

    // Save the numerator before EAX is clobbered.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correction for the sign of the magic constant vs the divisor.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, delivered in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // 64-bit imul only takes a 32-bit immediate; fall back to the literal pool.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3659
// Central dispatcher for integer HDiv/HRem. Constant divisors are strength-
// reduced (nothing for 0 since DivZeroCheck throws first; copy/negate for
// +/-1; shift sequence for powers of two on div; magic-number multiply
// otherwise). Non-constant divisors use idiv, guarded by a slow path for the
// divisor == -1 case, whose INT_MIN dividend would trap in hardware.
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv requires the dividend in RAX; the quotient lands in RAX, the
  // remainder in RDX.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (instruction->IsDiv() && IsPowerOfTwo(AbsOrMin(imm))) {
      DivByPowerOfTwo(instruction->AsDiv());
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3715
Calin Juravle7c4954d2014-10-28 16:57:40 +00003716void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3717 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003718 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003719 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003720 case DataType::Type::kInt32:
3721 case DataType::Type::kInt64: {
Calin Juravled0d48522014-11-04 16:40:20 +00003722 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003723 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003724 locations->SetOut(Location::SameAsFirstInput());
3725 // Intel uses edx:eax as the dividend.
3726 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003727 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3728 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3729 // output and request another temp.
3730 if (div->InputAt(1)->IsConstant()) {
3731 locations->AddTemp(Location::RequiresRegister());
3732 }
Calin Juravled0d48522014-11-04 16:40:20 +00003733 break;
3734 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003735
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003736 case DataType::Type::kFloat32:
3737 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003738 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003739 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003740 locations->SetOut(Location::SameAsFirstInput());
3741 break;
3742 }
3743
3744 default:
3745 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3746 }
3747}
3748
// Emits code for an HDiv. Integer cases delegate to the shared div/rem
// helper; FP cases emit divss/divsd with the divisor taken from a register,
// the RIP-relative literal pool, or a stack slot. The locations builder
// guarantees out == first input.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3797
Calin Juravlebacfec32014-11-14 15:54:36 +00003798void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003799 DataType::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003800 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003801 new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003802
3803 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003804 case DataType::Type::kInt32:
3805 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003806 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003807 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003808 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3809 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003810 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3811 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3812 // output and request another temp.
3813 if (rem->InputAt(1)->IsConstant()) {
3814 locations->AddTemp(Location::RequiresRegister());
3815 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003816 break;
3817 }
3818
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003819 case DataType::Type::kFloat32:
3820 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003821 locations->SetInAt(0, Location::Any());
3822 locations->SetInAt(1, Location::Any());
3823 locations->SetOut(Location::RequiresFpuRegister());
3824 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003825 break;
3826 }
3827
3828 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003829 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003830 }
3831}
3832
3833void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003834 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003835 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003836 case DataType::Type::kInt32:
3837 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003838 GenerateDivRemIntegral(rem);
3839 break;
3840 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003841 case DataType::Type::kFloat32:
3842 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003843 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003844 break;
3845 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003846 default:
3847 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3848 }
3849}
3850
Aart Bik1f8d51b2018-02-15 10:42:37 -08003851static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
3852 LocationSummary* locations = new (allocator) LocationSummary(minmax);
3853 switch (minmax->GetResultType()) {
3854 case DataType::Type::kInt32:
3855 case DataType::Type::kInt64:
3856 locations->SetInAt(0, Location::RequiresRegister());
3857 locations->SetInAt(1, Location::RequiresRegister());
3858 locations->SetOut(Location::SameAsFirstInput());
3859 break;
3860 case DataType::Type::kFloat32:
3861 case DataType::Type::kFloat64:
3862 locations->SetInAt(0, Location::RequiresFpuRegister());
3863 locations->SetInAt(1, Location::RequiresFpuRegister());
3864 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
3865 // the second input to be the output (we can simply swap inputs).
3866 locations->SetOut(Location::SameAsFirstInput());
3867 break;
3868 default:
3869 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
3870 }
3871}
3872
Aart Bik351df3e2018-03-07 11:54:57 -08003873void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
3874 bool is_min,
3875 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08003876 Location op1_loc = locations->InAt(0);
3877 Location op2_loc = locations->InAt(1);
3878
3879 // Shortcut for same input locations.
3880 if (op1_loc.Equals(op2_loc)) {
3881 // Can return immediately, as op1_loc == out_loc.
3882 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
3883 // a copy here.
3884 DCHECK(locations->Out().Equals(op1_loc));
3885 return;
3886 }
3887
3888 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3889 CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();
3890
3891 // (out := op1)
3892 // out <=? op2
3893 // if out is min jmp done
3894 // out := op2
3895 // done:
3896
3897 if (type == DataType::Type::kInt64) {
3898 __ cmpq(out, op2);
3899 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
3900 } else {
3901 DCHECK_EQ(type, DataType::Type::kInt32);
3902 __ cmpl(out, op2);
3903 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
3904 }
3905}
3906
// Emits floating-point min/max with Java semantics: NaN propagates (result
// is the canonical NaN bit pattern), and -0.0/+0.0 are distinguished by
// OR-ing (min) or AND-ing (max) the sign bits when the operands compare
// equal. out already holds op1 (locations pin out == first input).
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomis* sets PF on an unordered (NaN) comparison.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  // Operands compared equal: combine sign bits so min(+0,-0) == -0 and
  // max(+0,-0) == +0.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  // Load the canonical quiet-NaN bit pattern for the type.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
3989
Aart Bik351df3e2018-03-07 11:54:57 -08003990void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
3991 DataType::Type type = minmax->GetResultType();
3992 switch (type) {
3993 case DataType::Type::kInt32:
3994 case DataType::Type::kInt64:
3995 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
3996 break;
3997 case DataType::Type::kFloat32:
3998 case DataType::Type::kFloat64:
3999 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4000 break;
4001 default:
4002 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4003 }
4004}
4005
Aart Bik1f8d51b2018-02-15 10:42:37 -08004006void LocationsBuilderX86_64::VisitMin(HMin* min) {
4007 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4008}
4009
4010void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004011 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004012}
4013
4014void LocationsBuilderX86_64::VisitMax(HMax* max) {
4015 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4016}
4017
4018void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004019 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004020}
4021
Aart Bik3dad3412018-02-28 12:01:46 -08004022void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
4023 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4024 switch (abs->GetResultType()) {
4025 case DataType::Type::kInt32:
4026 case DataType::Type::kInt64:
4027 locations->SetInAt(0, Location::RequiresRegister());
4028 locations->SetOut(Location::SameAsFirstInput());
4029 locations->AddTemp(Location::RequiresRegister());
4030 break;
4031 case DataType::Type::kFloat32:
4032 case DataType::Type::kFloat64:
4033 locations->SetInAt(0, Location::RequiresFpuRegister());
4034 locations->SetOut(Location::SameAsFirstInput());
4035 locations->AddTemp(Location::RequiresFpuRegister());
4036 break;
4037 default:
4038 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4039 }
4040}
4041
// Emits code for HAbs. Integers use the branchless mask trick:
// mask = x >> (bits-1) (all ones if negative), then (x + mask) ^ mask,
// which negates negative values and leaves non-negative ones unchanged.
// Floats simply clear the sign bit by AND-ing with a literal-pool mask.
void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask.
      __ movl(mask, out);
      __ sarl(mask, Immediate(31));
      // Add mask.
      __ addl(out, mask);
      __ xorl(out, mask);
      break;
    }
    case DataType::Type::kInt64: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask.
      __ movq(mask, out);
      __ sarq(mask, Immediate(63));
      // Add mask.
      __ addq(out, mask);
      __ xorq(out, mask);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // All bits set except the float sign bit.
      __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
      __ andps(out, mask);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // All bits set except the double sign bit.
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
      __ andpd(out, mask);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4085
Calin Juravled0d48522014-11-04 16:40:20 +00004086void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004087 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00004088 locations->SetInAt(0, Location::Any());
Calin Juravled0d48522014-11-04 16:40:20 +00004089}
4090
// Emits a zero check on the divisor (input 0) that jumps to a throwing slow
// path when the value is zero. The comparison form depends on where the
// register allocator placed the divisor.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        // test reg,reg sets ZF exactly when the register is zero.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        // Constant divisor: the check resolves statically. Emit an
        // unconditional jump to the slow path only when it is zero;
        // otherwise no code is needed.
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        // Statically-known 64-bit divisor; see the kInt32 comment above.
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4139
Calin Juravle9aec02f2014-11-18 23:06:35 +00004140void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
4141 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4142
4143 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004144 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004145
4146 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004147 case DataType::Type::kInt32:
4148 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004149 locations->SetInAt(0, Location::RequiresRegister());
4150 // The shift count needs to be in CL.
4151 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
4152 locations->SetOut(Location::SameAsFirstInput());
4153 break;
4154 }
4155 default:
4156 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
4157 }
4158}
4159
// Emits the shift for HShl/HShr/HUShr. The first input doubles as the output
// (constrained by the locations builder); a variable count is in CL, a
// constant count is masked with the maximum shift distance for the width.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        // Variable count: the locations builder pinned it to RCX, so the
        // CL-implicit shift forms apply.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          // Shr is the arithmetic (sign-propagating) right shift.
          __ sarl(first_reg, second_reg);
        } else {
          // UShr is the logical (zero-filling) right shift.
          __ shrl(first_reg, second_reg);
        }
      } else {
        // Constant count, reduced modulo the maximum 32-bit shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        // Constant count, reduced modulo the maximum 64-bit shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4217
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004218void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4219 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004220 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004221
4222 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004223 case DataType::Type::kInt32:
4224 case DataType::Type::kInt64: {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004225 locations->SetInAt(0, Location::RequiresRegister());
4226 // The shift count needs to be in CL (unless it is a constant).
4227 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4228 locations->SetOut(Location::SameAsFirstInput());
4229 break;
4230 }
4231 default:
4232 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4233 UNREACHABLE();
4234 }
4235}
4236
// Emits a rotate-right. The first input doubles as the output; a variable
// count is in CL (pinned by the locations builder), a constant count is
// masked with the maximum shift distance for the operand width.
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        // Constant count, reduced modulo the maximum 32-bit shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case DataType::Type::kInt64:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        // Constant count, reduced modulo the maximum 64-bit shift distance.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4266
Calin Juravle9aec02f2014-11-18 23:06:35 +00004267void LocationsBuilderX86_64::VisitShl(HShl* shl) {
4268 HandleShift(shl);
4269}
4270
4271void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
4272 HandleShift(shl);
4273}
4274
4275void LocationsBuilderX86_64::VisitShr(HShr* shr) {
4276 HandleShift(shr);
4277}
4278
4279void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
4280 HandleShift(shr);
4281}
4282
4283void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
4284 HandleShift(ushr);
4285}
4286
4287void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
4288 HandleShift(ushr);
4289}
4290
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004291void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004292 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4293 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004294 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004295 if (instruction->IsStringAlloc()) {
4296 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4297 } else {
4298 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004299 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004300 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004301}
4302
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    CpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<CpuRegister>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize);
    // Load the StringFactory method pointer from the thread-local entrypoint
    // (GS-relative on x86-64), then call its quick-compiled code and record
    // the PC so the runtime can map it back to this instruction.
    __ gs()->movq(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString), /* no_rip */ true));
    __ call(Address(temp, code_offset.SizeValue()));
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    // Regular allocation: delegate to the instruction's runtime entrypoint.
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    DCHECK(!codegen_->IsLeafMethod());
  }
}
4319
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004320void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004321 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4322 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004323 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004324 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004325 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4326 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004327}
4328
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  // Pick the allocation entrypoint matching the (already loaded) array class.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4338
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004339void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004340 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004341 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004342 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4343 if (location.IsStackSlot()) {
4344 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4345 } else if (location.IsDoubleStackSlot()) {
4346 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4347 }
4348 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004349}
4350
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004351void InstructionCodeGeneratorX86_64::VisitParameterValue(
4352 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004353 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004354}
4355
4356void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
4357 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004358 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004359 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4360}
4361
4362void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
4363 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4364 // Nothing to do, the method is already at its location.
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004365}
4366
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004367void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
4368 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004369 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004370 locations->SetInAt(0, Location::RequiresRegister());
4371 locations->SetOut(Location::RequiresRegister());
4372}
4373
// Loads an ArtMethod* from a class' method table. A vtable entry is embedded
// in the class object (single load); an IMT entry requires loading the table
// pointer from the class first, then indexing into it (two loads).
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // First load: the IMT pointer out of the class object.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
            mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // Second load: the method entry, reusing the output register as base.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4391
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004392void LocationsBuilderX86_64::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004393 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004394 new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00004395 locations->SetInAt(0, Location::RequiresRegister());
4396 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004397}
4398
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004399void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4400 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004401 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4402 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004403 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004404 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004405 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004406 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004407 break;
4408
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004409 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004410 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004411 break;
4412
4413 default:
4414 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4415 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004416}
4417
David Brazdil66d126e2015-04-03 16:02:44 +01004418void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4419 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004420 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004421 locations->SetInAt(0, Location::RequiresRegister());
4422 locations->SetOut(Location::SameAsFirstInput());
4423}
4424
4425void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004426 LocationSummary* locations = bool_not->GetLocations();
4427 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4428 locations->Out().AsRegister<CpuRegister>().AsRegister());
4429 Location out = locations->Out();
4430 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4431}
4432
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004433void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004434 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004435 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004436 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004437 locations->SetInAt(i, Location::Any());
4438 }
4439 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004440}
4441
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004442void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004443 LOG(FATAL) << "Unimplemented";
4444}
4445
void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
  /*
   * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
   * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
   * For those cases, all we need to ensure is that there is a scheduling barrier in place.
   */
  switch (kind) {
    case MemBarrierKind::kAnyAny: {
      // StoreLoad ordering requires a real fence instruction.
      MemoryFence();
      break;
    }
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kStoreStore: {
      // nop — guaranteed by the hardware memory model; emitting nothing still
      // acts as a compiler scheduling barrier.
      break;
    }
    case MemBarrierKind::kNTStoreStore:
      // Non-Temporal Store/Store needs an explicit fence.
      MemoryFence(/* non-temporal */ true);
      break;
  }
}
4469
// Builds locations for an instance/static field get. Reference loads may need
// a read-barrier slow path; FP results go to an XMM register, everything else
// to a general-purpose register.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0 is the object (or class) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4495
// Emits the load for an instance/static field get, including (when needed)
// read barriers for reference fields, implicit null-check recording, and the
// LoadAny barrier required after a volatile load.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      // Zero-extending byte load.
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      // Sign-extending byte load.
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      // Zero-extending 16-bit load.
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      // Sign-extending 16-bit load.
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // A volatile load must not be reordered with later accesses.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4598
// Builds locations for an instance/static field set. Volatile stores restrict
// the value operand so the store can be emitted as a single instruction;
// reference stores may need temporaries for the write barrier or poisoning.
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  DataType::Type field_type = field_info.GetFieldType();
  bool is_volatile = field_info.IsVolatile();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  // Input 0 is the object (or class) holding the field; input 1 is the value.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
    }
  } else {
    if (is_volatile) {
      // In order to satisfy the semantics of volatile, this must be a single instruction store.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    }
  }
  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
    // Temporary register for the reference poisoning.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4635
Calin Juravle52c48962014-12-16 17:02:57 +00004636void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004637 const FieldInfo& field_info,
4638 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004639 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4640
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004641 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004642 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4643 Location value = locations->InAt(1);
4644 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004645 DataType::Type field_type = field_info.GetFieldType();
Calin Juravle52c48962014-12-16 17:02:57 +00004646 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4647
4648 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004649 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004650 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004651
Mark Mendellea5af682015-10-22 17:35:49 -04004652 bool maybe_record_implicit_null_check_done = false;
4653
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004654 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004655 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004656 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004657 case DataType::Type::kInt8: {
Mark Mendell40741f32015-04-20 22:10:34 -04004658 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01004659 __ movb(Address(base, offset),
4660 Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Mark Mendell40741f32015-04-20 22:10:34 -04004661 } else {
4662 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4663 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004664 break;
4665 }
4666
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004667 case DataType::Type::kUint16:
4668 case DataType::Type::kInt16: {
Mark Mendell40741f32015-04-20 22:10:34 -04004669 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01004670 __ movw(Address(base, offset),
4671 Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Mark Mendell40741f32015-04-20 22:10:34 -04004672 } else {
4673 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4674 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004675 break;
4676 }
4677
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004678 case DataType::Type::kInt32:
4679 case DataType::Type::kReference: {
Mark Mendell40741f32015-04-20 22:10:34 -04004680 if (value.IsConstant()) {
4681 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004682 // `field_type == DataType::Type::kReference` implies `v == 0`.
4683 DCHECK((field_type != DataType::Type::kReference) || (v == 0));
Roland Levillain4d027112015-07-01 15:41:14 +01004684 // Note: if heap poisoning is enabled, no need to poison
4685 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004686 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004687 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004688 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01004689 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4690 __ movl(temp, value.AsRegister<CpuRegister>());
4691 __ PoisonHeapReference(temp);
4692 __ movl(Address(base, offset), temp);
4693 } else {
4694 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4695 }
Mark Mendell40741f32015-04-20 22:10:34 -04004696 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004697 break;
4698 }
4699
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004700 case DataType::Type::kInt64: {
Mark Mendell40741f32015-04-20 22:10:34 -04004701 if (value.IsConstant()) {
4702 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004703 codegen_->MoveInt64ToAddress(Address(base, offset),
4704 Address(base, offset + sizeof(int32_t)),
4705 v,
4706 instruction);
4707 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004708 } else {
4709 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4710 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004711 break;
4712 }
4713
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004714 case DataType::Type::kFloat32: {
Mark Mendellea5af682015-10-22 17:35:49 -04004715 if (value.IsConstant()) {
4716 int32_t v =
4717 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4718 __ movl(Address(base, offset), Immediate(v));
4719 } else {
4720 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4721 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004722 break;
4723 }
4724
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004725 case DataType::Type::kFloat64: {
Mark Mendellea5af682015-10-22 17:35:49 -04004726 if (value.IsConstant()) {
4727 int64_t v =
4728 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4729 codegen_->MoveInt64ToAddress(Address(base, offset),
4730 Address(base, offset + sizeof(int32_t)),
4731 v,
4732 instruction);
4733 maybe_record_implicit_null_check_done = true;
4734 } else {
4735 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4736 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004737 break;
4738 }
4739
Aart Bik66c158e2018-01-31 12:55:04 -08004740 case DataType::Type::kUint32:
4741 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004742 case DataType::Type::kVoid:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004743 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004744 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004745 }
Calin Juravle52c48962014-12-16 17:02:57 +00004746
Mark Mendellea5af682015-10-22 17:35:49 -04004747 if (!maybe_record_implicit_null_check_done) {
4748 codegen_->MaybeRecordImplicitNullCheck(instruction);
4749 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004750
4751 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4752 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4753 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004754 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004755 }
4756
Calin Juravle52c48962014-12-16 17:02:57 +00004757 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004758 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004759 }
4760}
4761
// Delegates to the shared field-store location setup.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4765
// Delegates to the shared field-store codegen, forwarding the optimizer's
// knowledge of whether the stored value can be null (write-barrier elision).
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4769
// Delegates to the shared field-load location setup.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4773
// Delegates to the shared field-load codegen.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004777
Calin Juravle52c48962014-12-16 17:02:57 +00004778void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4779 HandleFieldGet(instruction);
4780}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004781
Calin Juravle52c48962014-12-16 17:02:57 +00004782void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4783 HandleFieldGet(instruction, instruction->GetFieldInfo());
4784}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004785
Calin Juravle52c48962014-12-16 17:02:57 +00004786void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4787 HandleFieldSet(instruction, instruction->GetFieldInfo());
4788}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004789
Calin Juravle52c48962014-12-16 17:02:57 +00004790void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004791 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004792}
4793
Calin Juravlee460d1d2015-09-29 04:52:17 +01004794void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4795 HUnresolvedInstanceFieldGet* instruction) {
4796 FieldAccessCallingConventionX86_64 calling_convention;
4797 codegen_->CreateUnresolvedFieldLocationSummary(
4798 instruction, instruction->GetFieldType(), calling_convention);
4799}
4800
// Emits the runtime call that performs the unresolved instance field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4810
// See VisitUnresolvedInstanceFieldGet: same calling-convention-based setup.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4817
// Emits the runtime call that performs the unresolved instance field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4827
// See VisitUnresolvedInstanceFieldGet: same calling-convention-based setup.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4834
// Emits the runtime call that performs the unresolved static field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4844
// See VisitUnresolvedInstanceFieldGet: same calling-convention-based setup.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4851
// Emits the runtime call that performs the unresolved static field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4861
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004862void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004863 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4864 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4865 ? Location::RequiresRegister()
4866 : Location::Any();
4867 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004868}
4869
Calin Juravle2ae48182016-03-16 14:05:09 +00004870void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4871 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004872 return;
4873 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004874 LocationSummary* locations = instruction->GetLocations();
4875 Location obj = locations->InAt(0);
4876
4877 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004878 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004879}
4880
Calin Juravle2ae48182016-03-16 14:05:09 +00004881void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004882 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004883 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004884
4885 LocationSummary* locations = instruction->GetLocations();
4886 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004887
4888 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004889 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004890 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004891 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004892 } else {
4893 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004894 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004895 __ jmp(slow_path->GetEntryLabel());
4896 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004897 }
4898 __ j(kEqual, slow_path->GetEntryLabel());
4899}
4900
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004901void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004902 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004903}
4904
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004905void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004906 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004907 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004908 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004909 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
4910 object_array_get_with_read_barrier
4911 ? LocationSummary::kCallOnSlowPath
4912 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004913 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004914 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004915 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004916 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04004917 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004918 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004919 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4920 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004921 // The output overlaps for an object array get when read barriers
4922 // are enabled: we do not want the move to overwrite the array's
4923 // location, as we need it to emit the read barrier.
4924 locations->SetOut(
4925 Location::RequiresRegister(),
4926 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004927 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004928}
4929
// Emits the code for an array element load. Integral types are loaded with
// the zero/sign-extending move matching their signedness; references go
// through the read-barrier machinery; String.charAt handles the compressed
// (8-bit) string layout. An implicit null check is recorded after the load
// unless the reference path already handled it in the switch.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      // Unsigned 8-bit: zero-extend.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kInt8: {
      // Signed 8-bit: sign-extend.
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kUint16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Branch cases into compressed and uncompressed for each index's type.
        // Bit 0 of the String count field encodes the compression state.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        __ testb(Address(obj, count_offset), Immediate(1));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        // Compressed: one byte per character.
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        // Uncompressed: two bytes (UTF-16 code unit) per character.
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case DataType::Type::kInt16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case DataType::Type::kInt32: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          // Fold the constant index into the byte offset.
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kFloat32: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kFloat64: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
5049
// Register-allocation setup for array element stores.
// Input 0 is the array, input 1 the index, input 2 the value. A reference
// store that needs a runtime type check (covariant arrays) may call into a
// slow path; a reference store into the heap also needs write-barrier temps.
void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
  }

  if (needs_write_barrier) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
    locations->AddTemp(Location::RequiresRegister());
  }
}
5077
5078void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
5079 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005080 Location array_loc = locations->InAt(0);
5081 CpuRegister array = array_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005082 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005083 Location value = locations->InAt(2);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005084 DataType::Type value_type = instruction->GetComponentType();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005085 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005086 bool needs_write_barrier =
5087 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005088 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5089 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5090 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005091
5092 switch (value_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005093 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005094 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005095 case DataType::Type::kInt8: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005096 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005097 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005098 if (value.IsRegister()) {
5099 __ movb(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005100 } else {
Nicolas Geoffray78612082017-07-24 14:18:53 +01005101 __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005102 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005103 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005104 break;
5105 }
5106
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005107 case DataType::Type::kUint16:
5108 case DataType::Type::kInt16: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005109 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005110 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005111 if (value.IsRegister()) {
5112 __ movw(address, value.AsRegister<CpuRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005113 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005114 DCHECK(value.IsConstant()) << value;
Nicolas Geoffray78612082017-07-24 14:18:53 +01005115 __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005116 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005117 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005118 break;
5119 }
5120
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005121 case DataType::Type::kReference: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005122 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005123 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005124
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005125 if (!value.IsRegister()) {
5126 // Just setting null.
5127 DCHECK(instruction->InputAt(2)->IsNullConstant());
5128 DCHECK(value.IsConstant()) << value;
5129 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005130 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005131 DCHECK(!needs_write_barrier);
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005132 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005133 break;
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005134 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005135
5136 DCHECK(needs_write_barrier);
5137 CpuRegister register_value = value.AsRegister<CpuRegister>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005138 // We cannot use a NearLabel for `done`, as its range may be too
5139 // short when Baker read barriers are enabled.
5140 Label done;
5141 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005142 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01005143 Location temp_loc = locations->GetTemp(0);
5144 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005145 if (may_need_runtime_call_for_type_check) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005146 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005147 codegen_->AddSlowPath(slow_path);
5148 if (instruction->GetValueCanBeNull()) {
5149 __ testl(register_value, register_value);
5150 __ j(kNotEqual, &not_null);
5151 __ movl(address, Immediate(0));
5152 codegen_->MaybeRecordImplicitNullCheck(instruction);
5153 __ jmp(&done);
5154 __ Bind(&not_null);
5155 }
5156
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005157 // Note that when Baker read barriers are enabled, the type
5158 // checks are performed without read barriers. This is fine,
5159 // even in the case where a class object is in the from-space
5160 // after the flip, as a comparison involving such a type would
5161 // not produce a false positive; it may of course produce a
5162 // false negative, in which case we would take the ArraySet
5163 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005164
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005165 // /* HeapReference<Class> */ temp = array->klass_
5166 __ movl(temp, Address(array, class_offset));
5167 codegen_->MaybeRecordImplicitNullCheck(instruction);
5168 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005169
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005170 // /* HeapReference<Class> */ temp = temp->component_type_
5171 __ movl(temp, Address(temp, component_offset));
5172 // If heap poisoning is enabled, no need to unpoison `temp`
5173 // nor the object reference in `register_value->klass`, as
5174 // we are comparing two poisoned references.
5175 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005176
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005177 if (instruction->StaticTypeOfArrayIsObjectArray()) {
5178 __ j(kEqual, &do_put);
5179 // If heap poisoning is enabled, the `temp` reference has
5180 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005181 __ MaybeUnpoisonHeapReference(temp);
5182
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005183 // If heap poisoning is enabled, no need to unpoison the
5184 // heap reference loaded below, as it is only used for a
5185 // comparison with null.
5186 __ cmpl(Address(temp, super_offset), Immediate(0));
5187 __ j(kNotEqual, slow_path->GetEntryLabel());
5188 __ Bind(&do_put);
5189 } else {
5190 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005191 }
5192 }
5193
5194 if (kPoisonHeapReferences) {
5195 __ movl(temp, register_value);
5196 __ PoisonHeapReference(temp);
5197 __ movl(address, temp);
5198 } else {
5199 __ movl(address, register_value);
5200 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005201 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005202 codegen_->MaybeRecordImplicitNullCheck(instruction);
5203 }
5204
5205 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
5206 codegen_->MarkGCCard(
5207 temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
5208 __ Bind(&done);
5209
5210 if (slow_path != nullptr) {
5211 __ Bind(slow_path->GetExitLabel());
5212 }
5213
5214 break;
5215 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005216
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005217 case DataType::Type::kInt32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005218 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005219 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005220 if (value.IsRegister()) {
5221 __ movl(address, value.AsRegister<CpuRegister>());
5222 } else {
5223 DCHECK(value.IsConstant()) << value;
5224 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5225 __ movl(address, Immediate(v));
5226 }
5227 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005228 break;
5229 }
5230
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005231 case DataType::Type::kInt64: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005232 uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005233 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005234 if (value.IsRegister()) {
5235 __ movq(address, value.AsRegister<CpuRegister>());
Mark Mendellea5af682015-10-22 17:35:49 -04005236 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005237 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005238 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005239 Address address_high =
5240 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04005241 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005242 }
5243 break;
5244 }
5245
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005246 case DataType::Type::kFloat32: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005247 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005248 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04005249 if (value.IsFpuRegister()) {
5250 __ movss(address, value.AsFpuRegister<XmmRegister>());
5251 } else {
5252 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005253 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
Mark Mendellea5af682015-10-22 17:35:49 -04005254 __ movl(address, Immediate(v));
5255 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005256 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005257 break;
5258 }
5259
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005260 case DataType::Type::kFloat64: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005261 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005262 Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendellea5af682015-10-22 17:35:49 -04005263 if (value.IsFpuRegister()) {
5264 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5265 codegen_->MaybeRecordImplicitNullCheck(instruction);
5266 } else {
5267 int64_t v =
5268 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005269 Address address_high =
5270 CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
Mark Mendellea5af682015-10-22 17:35:49 -04005271 codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
5272 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005273 break;
5274 }
5275
Aart Bik66c158e2018-01-31 12:55:04 -08005276 case DataType::Type::kUint32:
5277 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005278 case DataType::Type::kVoid:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005279 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005280 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005281 }
5282}
5283
5284void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005285 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005286 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005287 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005288 if (!instruction->IsEmittedAtUseSite()) {
5289 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5290 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005291}
5292
5293void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005294 if (instruction->IsEmittedAtUseSite()) {
5295 return;
5296 }
5297
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005298 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005299 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005300 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5301 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005302 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005303 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005304 // Mask out most significant bit in case the array is String's array of char.
5305 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005306 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005307 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005308}
5309
5310void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005311 RegisterSet caller_saves = RegisterSet::Empty();
5312 InvokeRuntimeCallingConvention calling_convention;
5313 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5314 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5315 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005316 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005317 HInstruction* length = instruction->InputAt(1);
5318 if (!length->IsEmittedAtUseSite()) {
5319 locations->SetInAt(1, Location::RegisterOrConstant(length));
5320 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005321}
5322
// Emits the array/string bounds check. The comparisons use unsigned condition
// codes (kAboveEqual / kBelowEqual), so a negative index — huge when viewed as
// unsigned — also branches to the slow path.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically out of bounds: unconditionally enter the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Shift out the compression flag bit to obtain the character count.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // The length was the first compare operand: branch when length <= index.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5384
// Dirties the card-table entry covering `object` so the GC will revisit it
// after a reference store. When `value_can_be_null` is true, the marking is
// skipped entirely for a null `value` (storing null creates no inter-object
// reference). Both `temp` and `card` are clobbered.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    // Skip the card mark when the stored value is null.
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the card-table base from thread-local storage (GS-relative).
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip */ true));
  // Compute the card index of `object`: address >> kCardShift.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Dirty the card by storing the low byte of `card` (the table base) at
  // base + index.
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5404
// Intentionally unimplemented: reaching this visitor is a compiler bug, so it
// aborts via LOG(FATAL).
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5408
5409void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005410 if (instruction->GetNext()->IsSuspendCheck() &&
5411 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5412 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5413 // The back edge will generate the suspend check.
5414 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5415 }
5416
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005417 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5418}
5419
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005420void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005421 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5422 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005423 // In suspend check slow path, usually there are no caller-save registers at all.
5424 // If SIMD instructions are present, however, we force spilling all live SIMD
5425 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005426 locations->SetCustomSlowPathCallerSaves(
5427 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005428}
5429
5430void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005431 HBasicBlock* block = instruction->GetBlock();
5432 if (block->GetLoopInformation() != nullptr) {
5433 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5434 // The back edge will generate the suspend check.
5435 return;
5436 }
5437 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5438 // The goto will generate the suspend check.
5439 return;
5440 }
5441 GenerateSuspendCheck(instruction, nullptr);
5442}
5443
// Emits a suspend check: tests the thread's flags word and enters a
// SuspendCheckSlowPathX86_64 when any flag is set. The slow path is cached on
// the instruction so repeated calls reuse it. When `successor` is null, the
// slow path returns to the instruction following the check; otherwise the
// fast path jumps straight to `successor` and the slow path is reached by
// fall-through.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First emission for this instruction: create and register the slow path.
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Compare the thread-local flags word (GS-relative) against zero.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5471
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005472X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5473 return codegen_->GetAssembler();
5474}
5475
// Emits code for the parallel-move operand at `index`. Dispatches on the
// source/destination location kinds: general-purpose registers, XMM
// registers, 32-bit / 64-bit / SIMD (128-bit) stack slots, and constants.
// TMP serves as scratch for memory-to-memory transfers.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      // A full 64-bit register copy is correct for both 32- and 64-bit values.
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // Stack-to-stack 32-bit move goes through TMP.
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // Stack-to-stack 64-bit move goes through TMP.
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      // Stack-to-stack SIMD move: copy the 128 bits as two quadwords via TMP.
      DCHECK(destination.IsSIMDStackSlot());
      size_t high = kX86_64WordSize;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is the shorter, idiomatic encoding for zeroing a register.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        // Store the raw bit pattern of the float into the stack slot.
        DCHECK(destination.IsStackSlot()) << destination;
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        // Store the raw bit pattern of the double into the stack slot.
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
}
5588
// Swaps the 32-bit contents of `reg` with the stack slot at RSP + `mem`,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP <- slot
  __ movl(Address(CpuRegister(RSP), mem), reg);               // slot <- reg
  __ movl(reg, CpuRegister(TMP));                             // reg <- old slot
}
5594
// Swaps the 64-bit contents of two general-purpose registers via TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5600
// Swaps the 64-bit contents of `reg` with the stack slot at RSP + `mem`,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP <- slot
  __ movq(Address(CpuRegister(RSP), mem), reg);               // slot <- reg
  __ movq(reg, CpuRegister(TMP));                             // reg <- old slot
}
5606
// Swaps the low 32 bits of XMM register `reg` with the stack slot at
// RSP + `mem`, using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP <- slot
  __ movss(Address(CpuRegister(RSP), mem), reg);              // slot <- xmm (low 32)
  __ movd(reg, CpuRegister(TMP));                             // xmm <- old slot
}
5612
// Swaps the low 64 bits of XMM register `reg` with the stack slot at
// RSP + `mem`, using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP <- slot
  __ movsd(Address(CpuRegister(RSP), mem), reg);              // slot <- xmm (low 64)
  __ movd(reg, CpuRegister(TMP));                             // xmm <- old slot
}
5618
// Swaps the full 128 bits of XMM register `reg` with the SIMD stack slot at
// RSP + `mem`. Reserves a temporary two-word staging area below RSP, spills
// the register there, swaps staging area and slot in memory, then reloads.
// Note: `mem` is adjusted by `extra_slot` because RSP moved.
void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 2 * kX86_64WordSize;
  __ subq(CpuRegister(RSP), Immediate(extra_slot));
  __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
  ExchangeMemory64(0, mem + extra_slot, 2);
  __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
  __ addq(CpuRegister(RSP), Immediate(extra_slot));
}
5627
// Swaps the 32-bit stack slots at RSP + `mem1` and RSP + `mem2`, using TMP
// plus one additional scratch register obtained through ScratchRegisterScope.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // If the scratch register had to be spilled, it was pushed on the stack,
  // which shifts our RSP-relative offsets by one word.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5640
// Swaps `num_of_qwords` 64-bit words between the stack regions starting at
// RSP + `mem1` and RSP + `mem2`, one quadword at a time, using TMP plus one
// additional scratch register obtained through ScratchRegisterScope.
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  // If the scratch register had to be spilled, it was pushed on the stack,
  // which shifts our RSP-relative offsets by one word.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    stack_offset += kX86_64WordSize;
  }
}
5660
// Emits code swapping the values held by the source and destination locations
// of move `index`. Dispatches on the (source, destination) location kinds;
// stack-to-stack swaps go through the scratch-register-based ExchangeMemory
// helpers, register/stack swaps through the Exchange{32,64,128} helpers.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // Core register <-> core register.
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    // 32-bit stack slot <-> 32-bit stack slot.
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    // 64-bit stack slot <-> 64-bit stack slot: one quadword.
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM: stash source in TMP, copy destination over source, then
    // write the stashed bits into destination.
    // NOTE(review): movd transfers only the low lane via TMP while movaps
    // copies the full register — assumes the swapped value fits the movd
    // width used by this assembler; confirm against the movd encoding.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    // 128-bit SIMD stack slot <-> SIMD stack slot: two quadwords.
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5702
5703
// Spills a scratch core register onto the stack so the resolver can reuse it.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5707
5708
// Restores a previously spilled scratch core register from the stack.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5712
// Emits a check that the class in `class_reg` is initialized, jumping to
// `slow_path` when it is not; execution resumes at the slow path's exit label.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  // The class status lives above the SubtypeCheckBits in the status word;
  // compute the byte holding it and the kInitialized value shifted into that
  // byte's bit positions, so a single cmpb suffices.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  // Any status below kInitialized requires the slow path.
  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5726
// Compares the class bitstring stored in the status word (class already
// loaded in `temp`) against the check's expected path-to-root, leaving the
// result in the flags: zero <=> the bitstring matches.
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  // The mask must cover a contiguous low bit range.
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory: a 16-bit mask lines up with a cmpw.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
5746
// Returns the class load kind this code generator can emit for the desired
// kind; x86-64 supports all kinds, so the request is returned unchanged after
// consistency checks on the compilation mode (AOT vs JIT).
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      // Patch-based kinds are only emitted by the AOT compiler.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
5769
// Allocates locations for HLoadClass: runtime-call loads use a fixed RAX
// in/out convention; other kinds use an arbitrary output register, with
// slow-path caller-save adjustments for read-barrier and bss-entry cases.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot image classes need no read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Input 0 holds the current ArtMethod.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      // Custom calling convention: RAX serves as both input and output.
      RegisterSet caller_saves = RegisterSet::Empty();
      caller_saves.Add(Location::RegisterLocation(RAX));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5807
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005808Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01005809 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005810 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005811 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005812 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005813 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005814 PatchInfo<Label>* info = &jit_class_patches_.back();
5815 return &info->label;
5816}
5817
// Emits code loading a class reference according to the chosen load kind;
// adds a slow path for unresolved bss entries and/or pending class
// initialization.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot image classes need no read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The leal operand is a dummy offset to be patched later.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // The bss entry may still be null when the class is unresolved.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5909
// Allocates locations for an explicit class initialization check: the class
// in a register; when the check has uses, the output aliases that input.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}
5918
Orion Hodsondbaa5c72018-05-10 08:22:46 +01005919void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5920 // Custom calling convention: RAX serves as both input and output.
5921 Location location = Location::RegisterLocation(RAX);
5922 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
5923}
5924
// Loads a MethodHandle via a runtime call (no fast path on x86-64).
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
5928
Orion Hodson18259d72018-04-12 11:18:23 +01005929void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
5930 // Custom calling convention: RAX serves as both input and output.
5931 Location location = Location::RegisterLocation(RAX);
5932 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
5933}
5934
// Loads a MethodType via a runtime call (no fast path on x86-64).
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
5938
// Emits the class initialization check, jumping to a LoadClass slow path
// (with the clinit flag set) when the class is not initialized.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
5947
// Returns the string load kind this code generator can emit for the desired
// kind; x86-64 supports all kinds, so the request is returned unchanged after
// consistency checks on the compilation mode (AOT vs JIT).
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      // Patch-based kinds are only emitted by the AOT compiler.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kRuntimeCall:
      break;
  }
  return desired_string_load_kind;
}
5965
// Allocates locations for HLoadString: runtime-call loads return in RAX;
// other kinds use an arbitrary register, with slow-path caller-save
// adjustments for the bss-entry case.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString to save everything.
        // Custom calling convention: RAX serves as both input and output.
        RegisterSet caller_saves = RegisterSet::Empty();
        caller_saves.Add(Location::RegisterLocation(RAX));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
5986
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005987Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01005988 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005989 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005990 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005991 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005992 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005993 PatchInfo<Label>* info = &jit_string_patches_.back();
5994 return &info->label;
5995}
5996
// Emits code loading a String reference according to the chosen load kind,
// falling back to the kQuickResolveString runtime call for kRuntimeCall.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // The leal operand is a dummy offset to be patched later.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // The bss entry may still be null when the string is unresolved.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6058
// Returns the gs:-relative address of the thread-local pending-exception
// slot.
static Address GetExceptionTlsAddress() {
  return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
                           /* no_rip */ true);
}
6063
// The pending exception is loaded into an arbitrary register.
void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
6069
// Loads the pending exception from the thread-local storage slot.
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
6073
// Clearing the exception needs no registers.
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6077
// Stores null into the thread-local pending-exception slot.
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6081
// Throw calls into the runtime; the exception object goes into the first
// runtime-call argument register.
void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
6088
// Delivers the exception via the quick runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6093
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006094// Temp is used for read barrier.
6095static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6096 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006097 !kUseBakerReadBarrier &&
6098 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006099 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006100 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6101 return 1;
6102 }
6103 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006104}
6105
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006106// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6107// interface pointer, the current interface is compared in memory.
6108// The other checks have one temp for loading the object's class.
6109static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6110 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6111 return 2;
6112 }
6113 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006114}
6115
// Allocates locations for HInstanceOf depending on the type check kind: the
// object in a register, the class as any location (or three constant inputs
// for bitstring checks), a register output, and read-barrier temps as needed.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // Inputs 1-3 of a bitstring check are constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
6156
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006157void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006158 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006159 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006160 Location obj_loc = locations->InAt(0);
6161 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006162 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006163 Location out_loc = locations->Out();
6164 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006165 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6166 DCHECK_LE(num_temps, 1u);
6167 Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006168 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006169 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6170 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6171 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006172 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006173 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006174
6175 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006176 // Avoid null check if we know obj is not null.
6177 if (instruction->MustDoNullCheck()) {
6178 __ testl(obj, obj);
6179 __ j(kEqual, &zero);
6180 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006181
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006182 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006183 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006184 ReadBarrierOption read_barrier_option =
6185 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006186 // /* HeapReference<Class> */ out = obj->klass_
6187 GenerateReferenceLoadTwoRegisters(instruction,
6188 out_loc,
6189 obj_loc,
6190 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006191 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006192 if (cls.IsRegister()) {
6193 __ cmpl(out, cls.AsRegister<CpuRegister>());
6194 } else {
6195 DCHECK(cls.IsStackSlot()) << cls;
6196 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6197 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006198 if (zero.IsLinked()) {
6199 // Classes must be equal for the instanceof to succeed.
6200 __ j(kNotEqual, &zero);
6201 __ movl(out, Immediate(1));
6202 __ jmp(&done);
6203 } else {
6204 __ setcc(kEqual, out);
6205 // setcc only sets the low byte.
6206 __ andl(out, Immediate(1));
6207 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006208 break;
6209 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006210
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006211 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006212 ReadBarrierOption read_barrier_option =
6213 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006214 // /* HeapReference<Class> */ out = obj->klass_
6215 GenerateReferenceLoadTwoRegisters(instruction,
6216 out_loc,
6217 obj_loc,
6218 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006219 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006220 // If the class is abstract, we eagerly fetch the super class of the
6221 // object to avoid doing a comparison we know will fail.
6222 NearLabel loop, success;
6223 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006224 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006225 GenerateReferenceLoadOneRegister(instruction,
6226 out_loc,
6227 super_offset,
6228 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006229 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006230 __ testl(out, out);
6231 // If `out` is null, we use it for the result, and jump to `done`.
6232 __ j(kEqual, &done);
6233 if (cls.IsRegister()) {
6234 __ cmpl(out, cls.AsRegister<CpuRegister>());
6235 } else {
6236 DCHECK(cls.IsStackSlot()) << cls;
6237 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6238 }
6239 __ j(kNotEqual, &loop);
6240 __ movl(out, Immediate(1));
6241 if (zero.IsLinked()) {
6242 __ jmp(&done);
6243 }
6244 break;
6245 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006246
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006247 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006248 ReadBarrierOption read_barrier_option =
6249 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006250 // /* HeapReference<Class> */ out = obj->klass_
6251 GenerateReferenceLoadTwoRegisters(instruction,
6252 out_loc,
6253 obj_loc,
6254 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006255 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006256 // Walk over the class hierarchy to find a match.
6257 NearLabel loop, success;
6258 __ Bind(&loop);
6259 if (cls.IsRegister()) {
6260 __ cmpl(out, cls.AsRegister<CpuRegister>());
6261 } else {
6262 DCHECK(cls.IsStackSlot()) << cls;
6263 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6264 }
6265 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006266 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006267 GenerateReferenceLoadOneRegister(instruction,
6268 out_loc,
6269 super_offset,
6270 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006271 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006272 __ testl(out, out);
6273 __ j(kNotEqual, &loop);
6274 // If `out` is null, we use it for the result, and jump to `done`.
6275 __ jmp(&done);
6276 __ Bind(&success);
6277 __ movl(out, Immediate(1));
6278 if (zero.IsLinked()) {
6279 __ jmp(&done);
6280 }
6281 break;
6282 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006283
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006284 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006285 ReadBarrierOption read_barrier_option =
6286 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006287 // /* HeapReference<Class> */ out = obj->klass_
6288 GenerateReferenceLoadTwoRegisters(instruction,
6289 out_loc,
6290 obj_loc,
6291 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006292 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006293 // Do an exact check.
6294 NearLabel exact_check;
6295 if (cls.IsRegister()) {
6296 __ cmpl(out, cls.AsRegister<CpuRegister>());
6297 } else {
6298 DCHECK(cls.IsStackSlot()) << cls;
6299 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6300 }
6301 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006302 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006303 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006304 GenerateReferenceLoadOneRegister(instruction,
6305 out_loc,
6306 component_offset,
6307 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006308 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006309 __ testl(out, out);
6310 // If `out` is null, we use it for the result, and jump to `done`.
6311 __ j(kEqual, &done);
6312 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6313 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006314 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006315 __ movl(out, Immediate(1));
6316 __ jmp(&done);
6317 break;
6318 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006319
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006320 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006321 // No read barrier since the slow path will retry upon failure.
6322 // /* HeapReference<Class> */ out = obj->klass_
6323 GenerateReferenceLoadTwoRegisters(instruction,
6324 out_loc,
6325 obj_loc,
6326 class_offset,
6327 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006328 if (cls.IsRegister()) {
6329 __ cmpl(out, cls.AsRegister<CpuRegister>());
6330 } else {
6331 DCHECK(cls.IsStackSlot()) << cls;
6332 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6333 }
6334 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006335 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6336 instruction, /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006337 codegen_->AddSlowPath(slow_path);
6338 __ j(kNotEqual, slow_path->GetEntryLabel());
6339 __ movl(out, Immediate(1));
6340 if (zero.IsLinked()) {
6341 __ jmp(&done);
6342 }
6343 break;
6344 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006345
Calin Juravle98893e12015-10-02 21:05:03 +01006346 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006347 case TypeCheckKind::kInterfaceCheck: {
6348 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006349 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006350 // cases.
6351 //
6352 // We cannot directly call the InstanceofNonTrivial runtime
6353 // entry point without resorting to a type checking slow path
6354 // here (i.e. by calling InvokeRuntime directly), as it would
6355 // require to assign fixed registers for the inputs of this
6356 // HInstanceOf instruction (following the runtime calling
6357 // convention), which might be cluttered by the potential first
6358 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006359 //
6360 // TODO: Introduce a new runtime entry point taking the object
6361 // to test (instead of its class) as argument, and let it deal
6362 // with the read barrier issues. This will let us refactor this
6363 // case of the `switch` code as it was previously (with a direct
6364 // call to the runtime not using a type checking slow path).
6365 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006366 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006367 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6368 instruction, /* is_fatal */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006369 codegen_->AddSlowPath(slow_path);
6370 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006371 if (zero.IsLinked()) {
6372 __ jmp(&done);
6373 }
6374 break;
6375 }
Vladimir Marko175e7862018-03-27 09:03:13 +00006376
6377 case TypeCheckKind::kBitstringCheck: {
6378 // /* HeapReference<Class> */ temp = obj->klass_
6379 GenerateReferenceLoadTwoRegisters(instruction,
6380 out_loc,
6381 obj_loc,
6382 class_offset,
6383 kWithoutReadBarrier);
6384
6385 GenerateBitstringTypeCheckCompare(instruction, out);
6386 if (zero.IsLinked()) {
6387 __ j(kNotEqual, &zero);
6388 __ movl(out, Immediate(1));
6389 __ jmp(&done);
6390 } else {
6391 __ setcc(kEqual, out);
6392 // setcc only sets the low byte.
6393 __ andl(out, Immediate(1));
6394 }
6395 break;
6396 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006397 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006398
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006399 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006400 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006401 __ xorl(out, out);
6402 }
6403
6404 if (done.IsLinked()) {
6405 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006406 }
6407
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006408 if (slow_path != nullptr) {
6409 __ Bind(slow_path->GetExitLabel());
6410 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006411}
6412
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006413void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006414 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00006415 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006416 LocationSummary* locations =
6417 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006418 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006419 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6420 // Require a register for the interface check since there is a loop that compares the class to
6421 // a memory address.
6422 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006423 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6424 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6425 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6426 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006427 } else {
6428 locations->SetInAt(1, Location::Any());
6429 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006430 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
6431 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006432}
6433
// Generates code for HCheckCast. Loads the object's class (a null receiver,
// when a null check is required, trivially passes) and verifies it against
// `cls` per `type_check_kind`; any failing or undecidable comparison jumps to
// TypeCheckSlowPathX86_64 to throw (or re-check without read barriers).
void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_GE(num_temps, 1u);
  DCHECK_LE(num_temps, 2u);
  // Second temp (if allocated) forwarded to the reference loads below.
  Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCode* type_check_slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);


  NearLabel done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ testl(temp, temp);
      // Otherwise, compare the classes.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      NearLabel loop;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ testl(temp, temp);
      __ j(kNotZero, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      // Do an exact check.
      NearLabel check_non_primitive_component_type;
      if (cls.IsRegister()) {
        __ cmpl(temp, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is not null (i.e. the object is indeed
      // an array), jump to label `check_non_primitive_component_type`
      // to further check that this component type is not a primitive
      // type.
      __ testl(temp, temp);
      // Otherwise, jump to the slow path to throw the exception.
      __ j(kZero, type_check_slow_path->GetEntryLabel());
      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck: {
      // We always go into the type check slow path for the unresolved case.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ jmp(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kInterfaceCheck: {
      // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
      // We cannot get false positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
      // Maybe poison the `cls` for direct comparison with memory.
      __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
      // Loop through the iftable and check if any class matches.
      // NOTE(review): the index steps by 2 with the class at even offsets —
      // this assumes paired iftable entries; confirm against the IfTable layout.
      NearLabel start_loop;
      __ Bind(&start_loop);
      // Need to subtract first to handle the empty array case.
      __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
      __ j(kNegative, type_check_slow_path->GetEntryLabel());
      // Go to next interface if the classes do not match.
      __ cmpl(cls.AsRegister<CpuRegister>(),
              CodeGeneratorX86_64::ArrayAddress(temp,
                                                maybe_temp2_loc,
                                                TIMES_4,
                                                object_array_data_offset));
      __ j(kNotEqual, &start_loop);  // Iterate if not the same class; fall through on a match.
      // If `cls` was poisoned above, unpoison it.
      __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      // Sets the flags; a mismatch throws via the slow path.
      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
      break;
    }
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  __ Bind(type_check_slow_path->GetExitLabel());
}
6666
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006667void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006668 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6669 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006670 InvokeRuntimeCallingConvention calling_convention;
6671 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6672}
6673
6674void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006675 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006676 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006677 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006678 if (instruction->IsEnter()) {
6679 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6680 } else {
6681 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6682 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006683}
6684
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006685void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6686void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6687void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6688
6689void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6690 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006691 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006692 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6693 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006694 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006695 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006696 locations->SetOut(Location::SameAsFirstInput());
6697}
6698
// Code generation for And/Or/Xor is shared; HandleBitwiseOperation
// dispatches on the instruction kind.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6710
6711void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6712 LocationSummary* locations = instruction->GetLocations();
6713 Location first = locations->InAt(0);
6714 Location second = locations->InAt(1);
6715 DCHECK(first.Equals(locations->Out()));
6716
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006717 if (instruction->GetResultType() == DataType::Type::kInt32) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006718 if (second.IsRegister()) {
6719 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006720 __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006721 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006722 __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006723 } else {
6724 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006725 __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006726 }
6727 } else if (second.IsConstant()) {
6728 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
6729 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006730 __ andl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006731 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006732 __ orl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006733 } else {
6734 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006735 __ xorl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006736 }
6737 } else {
6738 Address address(CpuRegister(RSP), second.GetStackIndex());
6739 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006740 __ andl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006741 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006742 __ orl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006743 } else {
6744 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006745 __ xorl(first.AsRegister<CpuRegister>(), address);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006746 }
6747 }
6748 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006749 DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006750 CpuRegister first_reg = first.AsRegister<CpuRegister>();
6751 bool second_is_constant = false;
6752 int64_t value = 0;
6753 if (second.IsConstant()) {
6754 second_is_constant = true;
6755 value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006756 }
Mark Mendell40741f32015-04-20 22:10:34 -04006757 bool is_int32_value = IsInt<32>(value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006758
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006759 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006760 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006761 if (is_int32_value) {
6762 __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
6763 } else {
6764 __ andq(first_reg, codegen_->LiteralInt64Address(value));
6765 }
6766 } else if (second.IsDoubleStackSlot()) {
6767 __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006768 } else {
6769 __ andq(first_reg, second.AsRegister<CpuRegister>());
6770 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006771 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006772 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006773 if (is_int32_value) {
6774 __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
6775 } else {
6776 __ orq(first_reg, codegen_->LiteralInt64Address(value));
6777 }
6778 } else if (second.IsDoubleStackSlot()) {
6779 __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006780 } else {
6781 __ orq(first_reg, second.AsRegister<CpuRegister>());
6782 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006783 } else {
6784 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006785 if (second_is_constant) {
Mark Mendell40741f32015-04-20 22:10:34 -04006786 if (is_int32_value) {
6787 __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
6788 } else {
6789 __ xorq(first_reg, codegen_->LiteralInt64Address(value));
6790 }
6791 } else if (second.IsDoubleStackSlot()) {
6792 __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006793 } else {
6794 __ xorq(first_reg, second.AsRegister<CpuRegister>());
6795 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006796 }
6797 }
6798}
6799
// Loads the heap reference at `*(out + offset)` into `out` itself: the base
// register is clobbered by the loaded value.  With a non-Baker read barrier,
// `maybe_temp` must hold a register so the original `out` value can be
// preserved for the slow path.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      // The slow path receives the saved base object (`maybe_temp`) so it can
      // recompute the reference.
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6832
// Loads the heap reference at `*(obj + offset)` into `out`, using separate
// source (`obj`) and destination (`out`) registers, applying the requested
// read barrier option.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6861
// Loads a GC root from `address` into `root`.  When `fixup_label` is
// non-null it is bound immediately after the emitted movl/leaq so that the
// instruction's 32-bit operand can be patched later.  With Baker read
// barriers, the per-register marking entrypoint is tested and, when it is
// non-null (i.e. the GC is marking), a slow path invokes it on the root.
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The root is stored as a 32-bit compressed reference, so a 32-bit
      // load above is the right width; assert the layout assumptions.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      const int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
6928
6929void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6930 Location ref,
6931 CpuRegister obj,
6932 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006933 bool needs_null_check) {
6934 DCHECK(kEmitCompilerReadBarrier);
6935 DCHECK(kUseBakerReadBarrier);
6936
6937 // /* HeapReference<Object> */ ref = *(obj + offset)
6938 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006939 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006940}
6941
6942void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6943 Location ref,
6944 CpuRegister obj,
6945 uint32_t data_offset,
6946 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006947 bool needs_null_check) {
6948 DCHECK(kEmitCompilerReadBarrier);
6949 DCHECK(kUseBakerReadBarrier);
6950
Roland Levillain3d312422016-06-23 13:53:42 +01006951 static_assert(
6952 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6953 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006954 // /* HeapReference<Object> */ ref =
6955 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01006956 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00006957 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006958}
6959
// Fast-path Baker read barrier for a reference load from `src` into `ref`.
// Emits the gray-bit test on the holder's lock word, the reference load
// itself, and a conditional jump into a marking slow path taken when the
// gray bit was set.  When `always_update_field` is true, the slow path used
// is ReadBarrierMarkAndUpdateFieldSlowPathX86_64, which additionally needs
// `temp1`/`temp2`.  NOTE: the instructions between the testb and the final
// `j(kNotZero, ...)` are chosen because they leave EFLAGS untouched.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above dereferences `obj`, so it doubles as the implicit null
    // check for this instruction.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
7043
7044void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7045 Location out,
7046 Location ref,
7047 Location obj,
7048 uint32_t offset,
7049 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007050 DCHECK(kEmitCompilerReadBarrier);
7051
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007052 // Insert a slow path based read barrier *after* the reference load.
7053 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007054 // If heap poisoning is enabled, the unpoisoning of the loaded
7055 // reference will be carried out by the runtime within the slow
7056 // path.
7057 //
7058 // Note that `ref` currently does not get unpoisoned (when heap
7059 // poisoning is enabled), which is alright as the `ref` argument is
7060 // not used by the artReadBarrierSlow entry point.
7061 //
7062 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007063 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007064 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7065 AddSlowPath(slow_path);
7066
Roland Levillain0d5a2812015-11-13 10:07:31 +00007067 __ jmp(slow_path->GetEntryLabel());
7068 __ Bind(slow_path->GetExitLabel());
7069}
7070
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007071void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7072 Location out,
7073 Location ref,
7074 Location obj,
7075 uint32_t offset,
7076 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007077 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007078 // Baker's read barriers shall be handled by the fast path
7079 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7080 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007081 // If heap poisoning is enabled, unpoisoning will be taken care of
7082 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007083 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007084 } else if (kPoisonHeapReferences) {
7085 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7086 }
7087}
7088
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007089void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7090 Location out,
7091 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007092 DCHECK(kEmitCompilerReadBarrier);
7093
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007094 // Insert a slow path based read barrier *after* the GC root load.
7095 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007096 // Note that GC roots are not affected by heap poisoning, so we do
7097 // not need to do anything special for this here.
7098 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007099 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007100 AddSlowPath(slow_path);
7101
Roland Levillain0d5a2812015-11-13 10:07:31 +00007102 __ jmp(slow_path->GetEntryLabel());
7103 __ Bind(slow_path->GetExitLabel());
7104}
7105
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007106void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007107 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007108 LOG(FATAL) << "Unreachable";
7109}
7110
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007111void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007112 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007113 LOG(FATAL) << "Unreachable";
7114}
7115
Mark Mendellfe57faa2015-09-18 09:26:15 -04007116// Simple implementation of packed switch - generate cascaded compare/jumps.
7117void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7118 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007119 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007120 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007121 locations->AddTemp(Location::RequiresRegister());
7122 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007123}
7124
// Lowers HPackedSwitch.  Switches with few entries become a cascade of
// compare/branch pairs; larger switches use a jump table of signed 32-bit
// offsets placed in the constant area (see LiteralCaseTable).
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each iteration covers two consecutive case values with a single cmpl:
    // a `first_condition` branch for case `index` and an equality branch for
    // case `index + 1`.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range?
  // (The unsigned kAbove comparison also rejects biased values that went
  // negative, since they wrap to large unsigned numbers.)
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
7205
xueliang.zhonge0eb4832017-10-30 13:43:14 +00007206void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7207 ATTRIBUTE_UNUSED) {
7208 LOG(FATAL) << "Unreachable";
7209}
7210
7211void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7212 ATTRIBUTE_UNUSED) {
7213 LOG(FATAL) << "Unreachable";
7214}
7215
Aart Bikc5d47542016-01-27 17:00:35 -08007216void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7217 if (value == 0) {
7218 __ xorl(dest, dest);
7219 } else {
7220 __ movl(dest, Immediate(value));
7221 }
7222}
7223
Mark Mendell92e83bf2015-05-07 11:25:03 -04007224void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7225 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007226 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007227 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007228 } else if (IsUint<32>(value)) {
7229 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007230 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7231 } else {
7232 __ movq(dest, Immediate(value));
7233 }
7234}
7235
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007236void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7237 if (value == 0) {
7238 __ xorps(dest, dest);
7239 } else {
7240 __ movss(dest, LiteralInt32Address(value));
7241 }
7242}
7243
7244void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7245 if (value == 0) {
7246 __ xorpd(dest, dest);
7247 } else {
7248 __ movsd(dest, LiteralInt64Address(value));
7249 }
7250}
7251
7252void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7253 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7254}
7255
7256void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7257 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7258}
7259
Aart Bika19616e2016-02-01 18:57:58 -08007260void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7261 if (value == 0) {
7262 __ testl(dest, dest);
7263 } else {
7264 __ cmpl(dest, Immediate(value));
7265 }
7266}
7267
7268void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7269 if (IsInt<32>(value)) {
7270 if (value == 0) {
7271 __ testq(dest, dest);
7272 } else {
7273 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7274 }
7275 } else {
7276 // Value won't fit in an int.
7277 __ cmpq(dest, LiteralInt64Address(value));
7278 }
7279}
7280
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007281void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7282 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007283 GenerateIntCompare(lhs_reg, rhs);
7284}
7285
7286void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007287 if (rhs.IsConstant()) {
7288 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007289 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007290 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007291 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007292 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007293 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007294 }
7295}
7296
7297void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7298 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7299 if (rhs.IsConstant()) {
7300 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7301 Compare64BitValue(lhs_reg, value);
7302 } else if (rhs.IsDoubleStackSlot()) {
7303 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7304 } else {
7305 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7306 }
7307}
7308
7309Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7310 Location index,
7311 ScaleFactor scale,
7312 uint32_t data_offset) {
7313 return index.IsConstant() ?
7314 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7315 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7316}
7317
Mark Mendellcfa410b2015-05-25 16:02:44 -04007318void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7319 DCHECK(dest.IsDoubleStackSlot());
7320 if (IsInt<32>(value)) {
7321 // Can move directly as an int32 constant.
7322 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7323 Immediate(static_cast<int32_t>(value)));
7324 } else {
7325 Load64BitValue(CpuRegister(TMP), value);
7326 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7327 }
7328}
7329
Mark Mendell9c86b482015-09-18 13:36:07 -04007330/**
7331 * Class to handle late fixup of offsets into constant area.
7332 */
7333class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
7334 public:
7335 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
7336 : codegen_(&codegen), offset_into_constant_area_(offset) {}
7337
7338 protected:
7339 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7340
7341 CodeGeneratorX86_64* codegen_;
7342
7343 private:
7344 void Process(const MemoryRegion& region, int pos) OVERRIDE {
7345 // Patch the correct offset for the instruction. We use the address of the
7346 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
7347 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
7348 int32_t relative_position = constant_offset - pos;
7349
7350 // Patch in the right value.
7351 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7352 }
7353
7354 // Location in constant area that the fixup refers to.
7355 size_t offset_into_constant_area_;
7356};
7357
7358/**
7359 t * Class to handle late fixup of offsets to a jump table that will be created in the
7360 * constant area.
7361 */
7362class JumpTableRIPFixup : public RIPFixup {
7363 public:
7364 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
7365 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
7366
7367 void CreateJumpTable() {
7368 X86_64Assembler* assembler = codegen_->GetAssembler();
7369
7370 // Ensure that the reference to the jump table has the correct offset.
7371 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7372 SetOffset(offset_in_constant_table);
7373
7374 // Compute the offset from the start of the function to this jump table.
7375 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
7376
7377 // Populate the jump table with the correct values for the jump table.
7378 int32_t num_entries = switch_instr_->GetNumEntries();
7379 HBasicBlock* block = switch_instr_->GetBlock();
7380 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7381 // The value that we want is the target offset - the position of the table.
7382 for (int32_t i = 0; i < num_entries; i++) {
7383 HBasicBlock* b = successors[i];
7384 Label* l = codegen_->GetLabelOf(b);
7385 DCHECK(l->IsBound());
7386 int32_t offset_to_block = l->Position() - current_table_offset;
7387 assembler->AppendInt32(offset_to_block);
7388 }
7389 }
7390
7391 private:
7392 const HPackedSwitch* switch_instr_;
7393};
7394
Mark Mendellf55c3e02015-03-26 21:07:46 -04007395void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7396 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007397 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007398 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7399 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007400 assembler->Align(4, 0);
7401 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007402
7403 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007404 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell9c86b482015-09-18 13:36:07 -04007405 jump_table->CreateJumpTable();
7406 }
7407
7408 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007409 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007410 }
7411
7412 // And finish up.
7413 CodeGenerator::Finalize(allocator);
7414}
7415
Mark Mendellf55c3e02015-03-26 21:07:46 -04007416Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007417 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007418 return Address::RIP(fixup);
7419}
7420
7421Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007422 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007423 return Address::RIP(fixup);
7424}
7425
7426Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007427 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007428 return Address::RIP(fixup);
7429}
7430
7431Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007432 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007433 return Address::RIP(fixup);
7434}
7435
Andreas Gampe85b62f22015-09-09 13:15:38 -07007436// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007437void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007438 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007439 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007440 return;
7441 }
7442
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007443 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007444
7445 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7446 if (trg.Equals(return_loc)) {
7447 return;
7448 }
7449
7450 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007451 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007452 parallel_move.AddMove(return_loc, trg, type, nullptr);
7453 GetMoveResolver()->EmitNativeCode(&parallel_move);
7454}
7455
Mark Mendell9c86b482015-09-18 13:36:07 -04007456Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7457 // Create a fixup to be used to create and address the jump table.
7458 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007459 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007460
7461 // We have to populate the jump tables.
7462 fixups_to_jump_tables_.push_back(table_fixup);
7463 return Address::RIP(table_fixup);
7464}
7465
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  // Store the 64-bit immediate `v` to memory. x86-64 has no 64-bit
  // immediate-to-memory store, so either emit one movq with a sign-extended
  // 32-bit immediate, or store the value as two 32-bit halves.
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    // Single 8-byte store; movq sign-extends the 32-bit immediate.
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // Didn't fit in a register. Do it in pieces.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    // Record the implicit null check on the first (low) store only: it is
    // the instruction that would fault on a null base.
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7483
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // Back-patch one JIT GC-root access: write the absolute address of the
  // root's slot in `roots_data` into the 32-bit literal recorded by `info`.
  // The label marks the end of the instruction; step back to its literal.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The patch site inside the code buffer is not guaranteed to be 4-byte
  // aligned, so use an explicitly unaligned 32-bit store.
  typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7495
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007496void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7497 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007498 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007499 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007500 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007501 }
7502
7503 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007504 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007505 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007506 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007507 }
7508}
7509
Roland Levillain4d027112015-07-01 15:41:14 +01007510#undef __
7511
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007512} // namespace x86_64
7513} // namespace art