blob: 7c293b86051463ea3b9bf988362dfe4952af7e40 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Mathieu Chartiere401d142015-04-22 13:56:20 -070019#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010025#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070026#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080027#include "intrinsics.h"
28#include "intrinsics_x86_64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010029#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070030#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070031#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070032#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010033#include "mirror/object_reference.h"
34#include "thread.h"
35#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010036#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010037#include "utils/x86_64/assembler_x86_64.h"
38#include "utils/x86_64/managed_register_x86_64.h"
39
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010040namespace art {
41
Roland Levillain0d5a2812015-11-13 10:07:31 +000042template<class MirrorType>
43class GcRoot;
44
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010045namespace x86_64 {
46
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010047static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010048static constexpr Register kMethodRegisterArgument = RDI;
Vladimir Markof3e0ee22015-12-17 15:23:13 +000049// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
50// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
51// generates less code/data with a small num_entries.
52static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010053
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +000054static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000055static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010056
Mark Mendell24f2dfa2015-01-14 19:51:45 -050057static constexpr int kC2ConditionMask = 0x400;
58
Vladimir Marko3232dbb2018-07-25 15:42:46 +010059static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
60 // Custom calling convention: RAX serves as both input and output.
61 RegisterSet caller_saves = RegisterSet::Empty();
62 caller_saves.Add(Location::RegisterLocation(RAX));
63 return caller_saves;
64}
65
Roland Levillain7cbd27f2016-08-11 23:53:33 +010066// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
67#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070068#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010069
Andreas Gampe85b62f22015-09-09 13:15:38 -070070class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010071 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000072 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010073
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010074 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +000075 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010076 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000077 if (instruction_->CanThrowIntoCatchBlock()) {
78 // Live registers will be restored in the catch block if caught.
79 SaveLiveRegisters(codegen, instruction_->GetLocations());
80 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010081 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000082 instruction_,
83 instruction_->GetDexPc(),
84 this);
Roland Levillain888d0672015-11-23 18:53:50 +000085 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010086 }
87
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010088 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010089
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010090 const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010091
Nicolas Geoffraye5038322014-07-04 09:41:32 +010092 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010093 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
94};
95
Andreas Gampe85b62f22015-09-09 13:15:38 -070096class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000097 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000098 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +000099
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100100 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000101 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000102 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100103 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000104 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000105 }
106
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100107 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100108
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100109 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100110
Calin Juravled0d48522014-11-04 16:40:20 +0000111 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000112 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
113};
114
// Slow path for an integer division/remainder whose divisor is -1 (see class
// name). Computes the result directly instead of via a divide instruction:
// x / -1 == -x (negation) and x % -1 == 0 (zero). The result register must
// already hold the dividend on entry — TODO confirm at the use site.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `at`: the HDiv/HRem instruction; `reg`: register holding the result;
  // `type`: kInt32 or kInt64; `is_div`: true for division, false for remainder.
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (type_ == DataType::Type::kInt32) {
      if (is_div_) {
        // Quotient: x / -1 == -x.
        __ negl(cpu_reg_);
      } else {
        // Remainder: x % -1 == 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(DataType::Type::kInt64, type_);
      if (is_div_) {
        // Quotient: x / -1 == -x (64-bit negate).
        __ negq(cpu_reg_);
      } else {
        // Remainder is zero. A 32-bit xor suffices: on x86-64, writing the
        // 32-bit form of a register zero-extends into the full 64 bits, and
        // the shorter encoding is preferred.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    // Resume normal execution after the division.
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;   // Register receiving the result.
  const DataType::Type type_;   // kInt32 or kInt64.
  const bool is_div_;           // Division (true) vs. remainder (false).
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
148
// Slow path calling the kQuickTestSuspend runtime entrypoint. After the call,
// either jumps back to `return_label_` (bound by the code that created this
// slow path, when `successor` is null) or jumps directly to the given
// successor basic block.
class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      // Return to the point right after the suspend check.
      __ jmp(GetReturnLabel());
    } else {
      // Continue at the explicitly requested successor block.
      __ jmp(x86_64_codegen->GetLabelOf(successor_));
    }
  }

  // Label to jump back to; only valid when no explicit successor was given.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }

 private:
  HBasicBlock* const successor_;  // May be null; then return_label_ is used.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};
186
// Slow path throwing on an out-of-bounds array/string access. Materializes
// the index and length into the first two runtime-call argument registers and
// calls kQuickThrowArrayBounds (or kQuickThrowStringBounds for String.charAt).
// Fatal: does not return to the compiled code.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // The length was never materialized in a register; load it from the
      // array object into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // Shift out the low bit — presumably the string compression flag
        // stored alongside the character count; confirm with mirror::String.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both entrypoints share the (int32_t index, int32_t length) signature.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
246
// Slow path shared by HLoadClass and HClinitCheck. Depending on the
// instruction, it resolves the type via kQuickResolveType and/or runs static
// initialization via kQuickInitializeStaticStorage, then moves the resulting
// class into the instruction's output location (if any).
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  // `cls`: the class to load; `at`: the instruction this slow path belongs to
  // (either `cls` itself or an HClinitCheck on it).
  LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(CpuRegister(RAX), Immediate(type_index.index_));
      x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // The class is already resolved; move it into RAX for the clinit call.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
    }
    if (must_do_clinit) {
      x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
302
Vladimir Markoaad75c62016-10-03 08:46:48 +0000303class LoadStringSlowPathX86_64 : public SlowPathCode {
304 public:
305 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
306
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100307 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Markoaad75c62016-10-03 08:46:48 +0000308 LocationSummary* locations = instruction_->GetLocations();
309 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
310
311 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
312 __ Bind(GetEntryLabel());
313 SaveLiveRegisters(codegen, locations);
314
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000315 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100316 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000317 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000318 x86_64_codegen->InvokeRuntime(kQuickResolveString,
319 instruction_,
320 instruction_->GetDexPc(),
321 this);
322 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
323 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
324 RestoreLiveRegisters(codegen, locations);
325
Vladimir Markoaad75c62016-10-03 08:46:48 +0000326 __ jmp(GetExitLabel());
327 }
328
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100329 const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }
Vladimir Markoaad75c62016-10-03 08:46:48 +0000330
331 private:
332 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
333};
334
// Slow path shared by HInstanceOf and HCheckCast. Calls the
// kQuickInstanceofNonTrivial or kQuickCheckInstanceOf runtime entrypoint with
// (object, class) in the first two argument registers. When `is_fatal` is
// true (a check-cast guaranteed to throw), no return path is emitted.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    // A fatal path that cannot be caught never returns, so saving registers
    // would be wasted work.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      // Instance-of produces a value; move the runtime's answer (in RAX) to
      // the instruction's output location.
      if (instruction_->IsInstanceOf()) {
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
397
Andreas Gampe85b62f22015-09-09 13:15:38 -0700398class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700399 public:
Aart Bik42249c32016-01-07 15:33:50 -0800400 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000401 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700402
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100403 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000404 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700405 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100406 LocationSummary* locations = instruction_->GetLocations();
407 SaveLiveRegisters(codegen, locations);
408 InvokeRuntimeCallingConvention calling_convention;
409 x86_64_codegen->Load32BitValue(
410 CpuRegister(calling_convention.GetRegisterAt(0)),
411 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100412 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100413 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700414 }
415
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100416 const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100417
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700418 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700419 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
420};
421
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100422class ArraySetSlowPathX86_64 : public SlowPathCode {
423 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000424 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100425
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100426 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100427 LocationSummary* locations = instruction_->GetLocations();
428 __ Bind(GetEntryLabel());
429 SaveLiveRegisters(codegen, locations);
430
431 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100432 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100433 parallel_move.AddMove(
434 locations->InAt(0),
435 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100436 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100437 nullptr);
438 parallel_move.AddMove(
439 locations->InAt(1),
440 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100441 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100442 nullptr);
443 parallel_move.AddMove(
444 locations->InAt(2),
445 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100446 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100447 nullptr);
448 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
449
Roland Levillain0d5a2812015-11-13 10:07:31 +0000450 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100451 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000452 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100453 RestoreLiveRegisters(codegen, locations);
454 __ jmp(GetExitLabel());
455 }
456
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100457 const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100458
459 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100460 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
461};
462
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref`: the (register) location holding the reference to mark.
  // `unpoison_ref_before_marking`: whether `ref` must be heap-unpoisoned
  // before calling the mark entrypoint (see EmitNativeCode below).
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }

  // Emits the out-of-line marking code: optionally unpoisons `ref_`, then
  // calls the per-register ReadBarrierMarkRegX entrypoint, which takes its
  // input and returns its output in that same register.
  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
546
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `temp1` holds the pre-marking reference while the entrypoint runs;
  // `temp2` preserves RAX across the CAS (see EmitNativeCode below).
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  // Emits the out-of-line code: calls the per-register mark entrypoint
  // (as in ReadBarrierMarkSlowPathX86_64), then, if marking produced a
  // different reference, writes it back into `*field_addr_` with a
  // LOCK CMPXCHG against the old value.
  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is about to be overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers; see the constructor comment for their roles.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
718
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  // Emits the out-of-line code: materializes the (ref, obj, offset/index)
  // arguments in the runtime calling convention registers, calls the
  // kQuickReadBarrierSlow entrypoint, and moves the result (RAX) to `out_`.
  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        // Note: `TIMES_4` serves here as the shift amount, i.e. log2 of
        // the 4-byte reference size asserted just below.
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a core caller-save register different from both `ref_` and
  // `obj_`; fatal if none exists (cannot happen on x86-64, see below).
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location receiving the entrypoint's result (moved from RAX).
  const Location out_;
  // The reference needing the read barrier (first entrypoint argument).
  const Location ref_;
  // The object holding the reference (second entrypoint argument).
  const Location obj_;
  // Static offset of the field within `obj_` (third entrypoint argument
  // when `index_` is invalid).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
900
901// Slow path generating a read barrier for a GC root.
902class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
903 public:
904 ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +0000905 : SlowPathCode(instruction), out_(out), root_(root) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000906 DCHECK(kEmitCompilerReadBarrier);
907 }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000908
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100909 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000910 LocationSummary* locations = instruction_->GetLocations();
911 DCHECK(locations->CanCall());
912 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000913 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
914 << "Unexpected instruction in read barrier for GC root slow path: "
915 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000916
917 __ Bind(GetEntryLabel());
918 SaveLiveRegisters(codegen, locations);
919
920 InvokeRuntimeCallingConvention calling_convention;
921 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
922 x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100923 x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000924 instruction_,
925 instruction_->GetDexPc(),
926 this);
927 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
928 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
929
930 RestoreLiveRegisters(codegen, locations);
931 __ jmp(GetExitLabel());
932 }
933
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100934 const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000935
936 private:
Roland Levillain0d5a2812015-11-13 10:07:31 +0000937 const Location out_;
938 const Location root_;
939
940 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
941};
942
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100943#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100944// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
945#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100946
Roland Levillain4fa13f62015-07-06 18:11:54 +0100947inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700948 switch (cond) {
949 case kCondEQ: return kEqual;
950 case kCondNE: return kNotEqual;
951 case kCondLT: return kLess;
952 case kCondLE: return kLessEqual;
953 case kCondGT: return kGreater;
954 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700955 case kCondB: return kBelow;
956 case kCondBE: return kBelowEqual;
957 case kCondA: return kAbove;
958 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700959 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100960 LOG(FATAL) << "Unreachable";
961 UNREACHABLE();
962}
963
Aart Bike9f37602015-10-09 11:15:55 -0700964// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100965inline Condition X86_64FPCondition(IfCondition cond) {
966 switch (cond) {
967 case kCondEQ: return kEqual;
968 case kCondNE: return kNotEqual;
969 case kCondLT: return kBelow;
970 case kCondLE: return kBelowEqual;
971 case kCondGT: return kAbove;
972 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700973 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800974 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100975 LOG(FATAL) << "Unreachable";
976 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700977}
978
// No dispatch adjustments are needed on x86-64: the desired dispatch info is
// returned unchanged (the `method` argument is intentionally unused).
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  return desired_dispatch_info;
}
984
// Emits a static or direct call: first materializes the callee method (or
// entrypoint) per the invoke's method load kind, then emits the call per its
// code pointer location, and finally records PC info for the call site.
// `temp` is the scratch register used to hold the callee method; for
// kRuntimeCall the runtime performs the call and this returns early.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the method is already available in an input register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().IsBootImage());
      // The dummy offset is fixed up later; see RecordBootImageMethodPatch.
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // 64-bit load from the method's .bss entry; address patched later.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
1044
// Emits a virtual call: loads the receiver's class, then the ArtMethod at the
// invoke's vtable index, calls its quick entrypoint, and records PC info.
// `temp_in` is the scratch register used for the class/method loads.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1077
// Records a boot-image intrinsic-reference patch (no dex file involved) and
// binds its label at the current code position for later linker patching.
void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
  boot_image_intrinsic_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
  __ Bind(&boot_image_intrinsic_patches_.back().label);
}
1082
// Records a boot-image offset patch; stored in boot_image_method_patches_
// (with a null dex file) and emitted as a DataBimgRelRoPatch by
// EmitLinkerPatches() when not compiling the boot image itself.
void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
  boot_image_method_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
  __ Bind(&boot_image_method_patches_.back().label);
}
1087
// Records a PC-relative patch for a method reference in the boot image,
// keyed by the invoke's target method (dex file + method index).
void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
  boot_image_method_patches_.emplace_back(
      invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
  __ Bind(&boot_image_method_patches_.back().label);
}
1093
// Records a patch for a method's .bss entry; keyed by the compiled graph's
// dex file and the invoke's dex method index.
void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
  method_bss_entry_patches_.emplace_back(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
  __ Bind(&method_bss_entry_patches_.back().label);
}
1098
// Records a PC-relative patch for a class reference in the boot image,
// keyed by the HLoadClass's dex file and type index.
void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
  boot_image_type_patches_.emplace_back(
      &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
  __ Bind(&boot_image_type_patches_.back().label);
}
1104
// Allocates a patch record for a type's .bss entry and returns its label for
// the caller to bind at the instruction to be patched (not bound here).
Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
  type_bss_entry_patches_.emplace_back(
      &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
  return &type_bss_entry_patches_.back().label;
}
1110
// Records a PC-relative patch for a string reference in the boot image,
// keyed by the HLoadString's dex file and string index.
void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
  boot_image_string_patches_.emplace_back(
      &load_string->GetDexFile(), load_string->GetStringIndex().index_);
  __ Bind(&boot_image_string_patches_.back().label);
}
1116
// Allocates a patch record for a string's .bss entry and returns its label for
// the caller to bind at the instruction to be patched (not bound here).
Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
  string_bss_entry_patches_.emplace_back(
      &load_string->GetDexFile(), load_string->GetStringIndex().index_);
  return &string_bss_entry_patches_.back().label;
}
1122
// Loads the address of a boot-image object identified by `boot_image_reference`
// into `reg`, choosing the addressing strategy by compilation mode:
//  - compiling the boot image: PC-relative leal fixed up via an intrinsic patch;
//  - PIC (AOT app compile): movl through a boot-image RelRo slot (patched);
//  - otherwise (JIT, per the DCHECK): the boot image is already mapped, so the
//    absolute 32-bit address is embedded directly as an immediate.
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    __ leal(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageIntrinsicPatch(boot_image_reference);
  } else if (GetCompilerOptions().GetCompilePic()) {
    __ movl(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageRelRoPatch(boot_image_reference);
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
  }
}
1138
Vladimir Marko6fd16062018-06-26 11:02:04 +01001139void CodeGeneratorX86_64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1140 uint32_t boot_image_offset) {
1141 DCHECK(invoke->IsStatic());
1142 InvokeRuntimeCallingConvention calling_convention;
1143 CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
1144 if (GetCompilerOptions().IsBootImage()) {
1145 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1146 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1147 __ leal(argument,
Andreas Gampe3db70682018-12-26 15:12:03 -08001148 Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko6fd16062018-06-26 11:02:04 +01001149 MethodReference target_method = invoke->GetTargetMethod();
1150 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1151 boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
1152 __ Bind(&boot_image_type_patches_.back().label);
1153 } else {
1154 LoadBootImageAddress(argument, boot_image_offset);
1155 }
1156 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1157 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1158}
1159
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
// Subtracted from a bound label position to locate the 32-bit immediate.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
1163
// Converts each recorded PatchInfo<Label> into a linker::LinkerPatch via the
// `Factory` function and appends it to `linker_patches`. The literal offset is
// the label position minus the size of the embedded 32-bit constant.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
  }
}
1174
// Adapts a three-argument patch factory (no dex file) to the four-argument
// signature expected by EmitPcRelativeLinkerPatches(); the dex file must be
// null for these patch kinds.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1183
// Flushes all recorded patch lists into `linker_patches`. Boot-image patch
// kinds are only emitted when compiling the boot image; otherwise the
// method-patch list carries boot-image offsets emitted as RelRo patches and
// the remaining boot-image lists must be empty. The final DCHECK verifies the
// up-front size accounting matched what was actually emitted.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_intrinsic_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_intrinsic_patches_, linker_patches);
  } else {
    // Outside boot-image compilation, boot_image_method_patches_ holds
    // boot-image offsets recorded by RecordBootImageRelRoPatch().
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
    DCHECK(boot_image_intrinsic_patches_.empty());
  }
  // .bss entry patches are emitted in every compilation mode.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1219
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001220void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001221 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001222}
1223
1224void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001225 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001226}
1227
// Returns the x86-64 view of the instruction set features from the
// compiler options.
const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
}
1231
// Spills core register `reg_id` to the stack slot at `stack_index`;
// returns the number of bytes written (one 64-bit word).
size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
  return kX86_64WordSize;
}
1236
// Reloads core register `reg_id` from the stack slot at `stack_index`;
// returns the number of bytes read (one 64-bit word).
size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  return kX86_64WordSize;
}
1241
// Spills XMM register `reg_id` to the stack slot at `stack_index`. When the
// graph uses SIMD, the full 128-bit register is saved (movups); otherwise
// only the 64-bit scalar part (movsd). Returns the spill slot size used.
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  } else {
    __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  }
  return GetFloatingPointSpillSlotSize();
}
1250
// Reloads XMM register `reg_id` from the stack slot at `stack_index`,
// mirroring SaveFloatingPointRegister(): full 128 bits with SIMD, 64 bits
// otherwise. Returns the spill slot size used.
size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  } else {
    __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  }
  return GetFloatingPointSpillSlotSize();
}
1259
// Emits a call to the quick runtime entrypoint `entrypoint` on behalf of
// `instruction`, recording a stack map at `dex_pc` when the entrypoint
// requires one. `slow_path` may be null.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1270
// Emits a runtime call by raw entrypoint offset without recording PC info;
// only valid for calls that are validated to not need a stack map.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1277
// Emits the actual runtime call: an indirect call through the gs segment at
// `entry_point_offset` (an offset into the current Thread, per the
// GetThreadOffset<> usage in InvokeRuntime()).
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
}
1281
// x86-64 uses no CPU register pairs (64-bit registers hold long values).
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);
// Constructs the x86-64 code generator: registers the callee-save masks
// (including the fake return-address register) with the base CodeGenerator
// and arena-allocates all patch/fixup bookkeeping containers.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001317
// Constructs the instruction visitor that emits code through `codegen`'s
// assembler.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1323
// Marks registers that the register allocator must never hand out.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}
1331
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}
David Srbecky9d8606d2015-04-12 09:35:32 +01001335
// Maps a floating-point register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1339
// Emits the method prologue: optional hotness counting, the stack overflow
// probe, callee-save core and XMM spills (with matching CFI records), the
// frame allocation, storing the current ArtMethod*, and initializing the
// should_deoptimize flag when present.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    // Bump the method's hotness counter; the method pointer is still in its
    // argument register at this point.
    __ addw(Address(CpuRegister(kMethodRegisterArgument),
                    ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
  }

  if (!skip_overflow_check) {
    // Probe below RSP: faults (and is handled) if the stack would overflow.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push callee-save core registers (reverse order so pops in the epilogue
  // restore them in declaration order).
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();

  // Spill callee-save XMM registers into their frame slots.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
  }
}
1399
// Emits the method epilogue: restores callee-save XMM and core registers,
// deallocates the frame, and returns. CFI state is snapshotted and restored
// around the epilogue so subsequent code keeps correct unwind info.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Reload callee-save XMM registers from their frame slots.
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetFloatingPointSpillSlotSize();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Deallocate the non-spill part of the frame.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop callee-save core registers (forward order mirrors the reverse-order
    // pushes in GenerateFrameEntry()).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1430
// Binds the label of `block` at the current assembler position.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1434
// Emits a move between two arbitrary locations (register, XMM register,
// 32-bit stack slot, 64-bit stack slot, or constant source). Instruction
// width (movl/movq, movss/movsd) follows the slot kind; stack-to-stack moves
// go through the reserved TMP register.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    // Destination: core register.
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    // Destination: XMM register.
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    // Destination: 32-bit stack slot.
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    // Destination: 64-bit stack slot.
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1514
Calin Juravle175dc732015-08-25 15:42:32 +01001515void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1516 DCHECK(location.IsRegister());
1517 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1518}
1519
// Type-agnostic move: on x86-64 the destination type is not needed, so this
// simply forwards to Move().
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1524
1525void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1526 if (location.IsRegister()) {
1527 locations->AddTemp(location);
1528 } else {
1529 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1530 }
1531}
1532
// Shared lowering for HGoto and HTryBoundary fall-through: emits nothing for
// jumps into the exit block after an always-throwing instruction, emits a
// suspend check (plus optional hotness counting) on loop back edges, and
// otherwise jumps to `successor` unless it is the next block in layout order.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      // Reload the current ArtMethod* from the frame and bump its hotness.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), 0));
      __ addw(Address(CpuRegister(TMP), ArtMethod::HotnessCountOffset().Int32Value()),
              Immediate(1));
    }
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1560
// HGoto needs no locations.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
1564
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  // Code emission is shared with HTryBoundary via HandleGoto.
  HandleGoto(got, got->GetSuccessor());
}
1568
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary has no inputs or outputs.
  try_boundary->SetLocations(nullptr);
}
1572
1573void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
1574 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1575 if (!successor->IsExitBlock()) {
1576 HandleGoto(try_boundary, successor);
1577 }
1578}
1579
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  // The exit instruction has no inputs or outputs.
  exit->SetLocations(nullptr);
}
1583
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // Intentionally empty: no code is generated for the exit block.
}
1586
// Emits the jumps for a floating-point condition whose eflags were just set
// by ucomiss/ucomisd. An unordered result (a NaN input) must be dispatched
// explicitly first, because the ordered condition code below would otherwise
// classify it arbitrarily.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
  // The ordered-false path falls through; the caller emits any jump to
  // `false_label` it still needs.
}
1598
// Emits the instruction(s) that set the eflags for `condition` without
// consuming them: an int/long compare for integral and reference types, or a
// ucomiss/ucomisd for floating point. The caller decides how to branch or
// setcc on the resulting flags.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      // 32-bit compare; the helper handles register, constant and stack RHS.
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      // FP constants are not encodable as immediates; they are read from the
      // literal (constant) area instead.
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1652
1653template<class LabelType>
1654void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1655 LabelType* true_target_in,
1656 LabelType* false_target_in) {
1657 // Generated branching requires both targets to be explicit. If either of the
1658 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1659 LabelType fallthrough_target;
1660 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1661 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1662
1663 // Generate the comparison to set the CC.
1664 GenerateCompareTest(condition);
1665
1666 // Now generate the correct jump(s).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001667 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001668 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001669 case DataType::Type::kInt64: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001670 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1671 break;
1672 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001673 case DataType::Type::kFloat32: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001674 GenerateFPJumps(condition, true_target, false_target);
1675 break;
1676 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001677 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001678 GenerateFPJumps(condition, true_target, false_target);
1679 break;
1680 }
1681 default:
1682 LOG(FATAL) << "Unexpected condition type " << type;
1683 }
1684
David Brazdil0debae72015-11-12 18:37:00 +00001685 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001686 __ jmp(false_target);
1687 }
David Brazdil0debae72015-11-12 18:37:00 +00001688
1689 if (fallthrough_target.IsLinked()) {
1690 __ Bind(&fallthrough_target);
1691 }
Mark Mendellc4701932015-04-10 13:18:51 -04001692}
1693
David Brazdil0debae72015-11-12 18:37:00 +00001694static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1695 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1696 // are set only strictly before `branch`. We can't use the eflags on long
1697 // conditions if they are materialized due to the complex branching.
1698 return cond->IsCondition() &&
1699 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001700 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001701}
1702
// Emits a test of `instruction`'s condition input and branches to
// `true_target`/`false_target`. A nullptr target means "fall through".
// Handles constant conditions, materialized (boolean-valued) conditions,
// flag reuse from the previous instruction, and conditions folded into
// their user.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The eflags set by the condition's materialization are still valid;
      // branch on them directly without re-testing.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    // Plain integer comparison: compare the condition's own operands and
    // branch on the (possibly negated) condition code.
    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1786
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001787void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001788 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00001789 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001790 locations->SetInAt(0, Location::Any());
1791 }
1792}
1793
1794void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001795 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1796 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1797 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1798 nullptr : codegen_->GetLabelOf(true_successor);
1799 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1800 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001801 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001802}
1803
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  // Restrict the registers saved around the slow-path call to the first
  // argument register of the runtime calling convention.
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  // As in VisitIf: only a materialized condition needs an operand slot.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1815
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Branch to the deoptimization slow path when the condition holds;
  // otherwise fall through and keep executing compiled code.
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
1823
Mingyao Yang063fc772016-08-02 11:02:54 -07001824void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001825 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07001826 LocationSummary(flag, LocationSummary::kNoCall);
1827 locations->SetOut(Location::RequiresRegister());
1828}
1829
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // Load the 32-bit should-deoptimize flag from its slot in the current frame.
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1834
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001835static bool SelectCanUseCMOV(HSelect* select) {
1836 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001837 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001838 return false;
1839 }
1840
1841 // A FP condition doesn't generate the single CC that we need.
1842 HInstruction* condition = select->GetCondition();
1843 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001844 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001845 return false;
1846 }
1847
1848 // We can generate a CMOV for this Select.
1849 return true;
1850}
1851
David Brazdil74eb1b22015-12-14 11:44:01 +00001852void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001853 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001854 if (DataType::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001855 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001856 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001857 } else {
1858 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001859 if (SelectCanUseCMOV(select)) {
Mark Mendelldee1b9a2016-02-12 14:36:51 -05001860 if (select->InputAt(1)->IsConstant()) {
1861 locations->SetInAt(1, Location::RequiresRegister());
1862 } else {
1863 locations->SetInAt(1, Location::Any());
1864 }
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001865 } else {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001866 locations->SetInAt(1, Location::Any());
1867 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001868 }
1869 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1870 locations->SetInAt(2, Location::RequiresRegister());
1871 }
1872 locations->SetOut(Location::SameAsFirstInput());
1873}
1874
// Lowers HSelect either to a CMOV (integral types with an integral condition)
// or to an explicit test-and-branch over a move.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    // Default branch condition; used when the condition register is tested
    // against zero below.
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is folded into this select: emit its compare now.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch around the move of the "true" value. The output
    // already holds the "false" value (SameAsFirstInput).
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1931
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // Allocate an empty LocationSummary; the instruction has no operands.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
1935
void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty:
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
1939
void CodeGeneratorX86_64::GenerateNop() {
  // Emit a single nop instruction.
  __ nop();
}
1943
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001944void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001945 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001946 new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001947 // Handle the long/FP comparisons made in instruction simplification.
1948 switch (cond->InputAt(0)->GetType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001949 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04001950 locations->SetInAt(0, Location::RequiresRegister());
1951 locations->SetInAt(1, Location::Any());
1952 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001953 case DataType::Type::kFloat32:
1954 case DataType::Type::kFloat64:
Mark Mendellc4701932015-04-10 13:18:51 -04001955 locations->SetInAt(0, Location::RequiresFpuRegister());
1956 locations->SetInAt(1, Location::Any());
1957 break;
1958 default:
1959 locations->SetInAt(0, Location::RequiresRegister());
1960 locations->SetInAt(1, Location::Any());
1961 break;
1962 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001963 if (!cond->IsEmittedAtUseSite()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001964 locations->SetOut(Location::RequiresRegister());
1965 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001966}
1967
// Materializes a condition into a 0/1 value in its output register. Integral
// conditions use setcc; FP conditions need explicit NaN-aware branches.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    // The user (e.g. HIf or HSelect) emits the comparison itself.
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      // The xorl must come before the compare — it clobbers eflags.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2037
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2041
void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2045
void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2049
void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2053
void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2057
void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2061
void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2065
void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2069
void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2073
void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2077
void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2081
void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2085
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2089
void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2093
void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2097
void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2101
void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2105
void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2109
void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Locations for all comparison conditions are set up by HandleCondition.
  HandleCondition(comp);
}
2113
void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Code for all comparison conditions is emitted by HandleCondition.
  HandleCondition(comp);
}
2117
// Register constraints for HCompare: integral inputs take a core register
// with an any-location RHS, FP inputs take an XMM register. The result is
// always a core register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output may share a register with an input (kNoOutputOverlap).
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2145
// Generates code for HCompare: materializes -1, 0 or 1 into `out` depending on
// whether input 0 is less than, equal to, or greater than input 1. For
// floating-point inputs, an unordered comparison (NaN) yields 1 or -1
// according to the compare's gt-bias.
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Condition used below to branch to the `less` block; integer compares use
  // the signed kLess, FP compares override this with kBelow (see below).
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      // ucomiss supports a constant (via the literal pool), a stack slot or an
      // XMM register as its second operand.
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // NaN operands: result is gt-biased (1) or lt-biased (-1).
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Flags from the compare above are still live here.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2215
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002216void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002217 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002218 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002219 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002220}
2221
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at use site.
}
2225
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002226void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2227 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002228 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002229 locations->SetOut(Location::ConstantLocation(constant));
2230}
2231
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at use site.
}
2235
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002236void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002237 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002238 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002239 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002240}
2241
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at use site.
}
2245
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002246void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2247 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002248 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002249 locations->SetOut(Location::ConstantLocation(constant));
2250}
2251
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at use site.
}
2255
2256void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2257 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002258 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002259 locations->SetOut(Location::ConstantLocation(constant));
2260}
2261
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at use site.
}
2266
// A constructor fence needs no registers or stack slots, hence no location
// summary.
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}
2270
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is implemented as a store-store barrier: writes to the
  // new object's fields must be visible before its reference is published.
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2275
// A memory barrier needs no registers or stack slots, hence no location
// summary.
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
2279
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind carried by the HIR node.
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2283
// A void return has no input, hence no location summary.
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
2287
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Nothing to return; just tear down the frame and emit the `ret`.
  codegen_->GenerateFrameExit();
}
2291
// The returned value must already sit in the ABI return register: RAX for
// core/reference types, XMM0 for floating-point types.
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2316
// The register allocator has already placed the return value in RAX/XMM0 (see
// LocationsBuilderX86_64::VisitReturn), so only the frame exit is emitted;
// debug builds verify the register assignment first.
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case DataType::Type::kReference:
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2343
// Returns the calling-convention location of a callee's return value:
// RAX for integral/reference types, XMM0 for floating-point types, and no
// location for void. The switch is exhaustive over DataType::Type, so a
// missing case is a compile-time error rather than a runtime fallthrough.
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32:
    case DataType::Type::kUint64:
    case DataType::Type::kInt64:
      return Location::RegisterLocation(RAX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  UNREACHABLE();
}
2368
// Returns the register that carries the current ArtMethod* on entry.
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2372
// Computes the location (register or stack slot) of the next argument of the
// given type under the managed x86-64 calling convention. Stateful: each call
// advances the visitor's GP/FP register indices and the stack index, so
// arguments must be visited in order.
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      // 32-bit (and narrower) core values take one GP register or one slot.
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      // 64-bit values occupy two stack slots but only one GP register.
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      // FP arguments use a separate register sequence from GP arguments.
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      // Doubles occupy two stack slots but a single FP register.
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2431
// Unresolved invokes are dispatched through a runtime trampoline.
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2438
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Shared (architecture-independent) runtime call emission.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2442
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002443void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002444 // Explicit clinit checks triggered by static invokes must have been pruned by
2445 // art::PrepareForRegisterAllocation.
2446 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002447
Mark Mendellfb8d2792015-03-31 22:16:59 -04002448 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002449 if (intrinsic.TryDispatch(invoke)) {
2450 return;
2451 }
2452
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002453 HandleInvoke(invoke);
2454}
2455
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002456static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2457 if (invoke->GetLocations()->Intrinsified()) {
2458 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2459 intrinsic.Dispatch(invoke);
2460 return true;
2461 }
2462 return false;
2463}
2464
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002465void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002466 // Explicit clinit checks triggered by static invokes must have been pruned by
2467 // art::PrepareForRegisterAllocation.
2468 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002469
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002470 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2471 return;
2472 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002473
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002474 LocationSummary* locations = invoke->GetLocations();
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002475 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray38207af2015-06-01 15:46:22 +01002476 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002477}
2478
// Common location setup for all invoke kinds: lets the shared helper assign
// argument/return locations per the dex calling convention.
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2483
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002484void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002485 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002486 if (intrinsic.TryDispatch(invoke)) {
2487 return;
2488 }
2489
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002490 HandleInvoke(invoke);
2491}
2492
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002493void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002494 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2495 return;
2496 }
2497
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002498 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002499 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002500}
2501
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: RAX carries the dex method index at the call
  // (see VisitInvokeInterface in the instruction code generator).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2507
// Emits an interface call: loads the receiver's class, indexes its IMT
// (Interface Method Table) and calls through the resolved ArtMethod's quick
// entry point, with the dex method index passed as a hidden argument in RAX.
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Set the hidden argument. This is safe to do this here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  // The class load above is the implicit null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the offset of this interface method's IMT slot.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2553
// Polymorphic invokes (MethodHandle.invoke/invokeExact) use the generic
// invoke location setup.
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
2557
void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Shared (architecture-independent) call emission.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
2561
// invoke-custom (call sites) uses the generic invoke location setup.
void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}
2565
void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  // Shared (architecture-independent) call emission.
  codegen_->GenerateInvokeCustomCall(invoke);
}
2569
// Negation is done in place (output shares the input register). FP negation
// additionally needs a temporary FP register for the sign-bit mask (see the
// instruction code generator).
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2591
// Emits negation in place: neg{l,q} for integers, and an XOR with the
// sign-bit mask for floating-point values (which flips the sign without
// touching the other bits).
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2635
Roland Levillaindff1f282014-11-05 14:15:05 +00002636void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2637 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002638 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002639 DataType::Type result_type = conversion->GetResultType();
2640 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002641 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2642 << input_type << " -> " << result_type;
David Brazdil46e2a392015-03-16 17:31:52 +00002643
Roland Levillaindff1f282014-11-05 14:15:05 +00002644 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002645 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002646 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002647 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002648 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002649 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2650 locations->SetInAt(0, Location::Any());
2651 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002652 break;
2653
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002654 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002655 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002656 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002657 locations->SetInAt(0, Location::Any());
2658 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2659 break;
2660
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002661 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002662 locations->SetInAt(0, Location::RequiresFpuRegister());
2663 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002664 break;
2665
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002666 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002667 locations->SetInAt(0, Location::RequiresFpuRegister());
2668 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002669 break;
2670
2671 default:
2672 LOG(FATAL) << "Unexpected type conversion from " << input_type
2673 << " to " << result_type;
2674 }
2675 break;
2676
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002677 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002678 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002679 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002680 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002681 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002682 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002683 case DataType::Type::kInt16:
2684 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002685 // TODO: We would benefit from a (to-be-implemented)
2686 // Location::RegisterOrStackSlot requirement for this input.
2687 locations->SetInAt(0, Location::RequiresRegister());
2688 locations->SetOut(Location::RequiresRegister());
2689 break;
2690
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002691 case DataType::Type::kFloat32:
Roland Levillain624279f2014-12-04 11:54:28 +00002692 locations->SetInAt(0, Location::RequiresFpuRegister());
2693 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002694 break;
2695
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002696 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002697 locations->SetInAt(0, Location::RequiresFpuRegister());
2698 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002699 break;
2700
2701 default:
2702 LOG(FATAL) << "Unexpected type conversion from " << input_type
2703 << " to " << result_type;
2704 }
2705 break;
2706
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002707 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002708 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002709 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002710 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002711 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002712 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002713 case DataType::Type::kInt16:
2714 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002715 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002716 locations->SetOut(Location::RequiresFpuRegister());
2717 break;
2718
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002719 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002720 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002721 locations->SetOut(Location::RequiresFpuRegister());
2722 break;
2723
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002724 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002725 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002726 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002727 break;
2728
2729 default:
2730 LOG(FATAL) << "Unexpected type conversion from " << input_type
2731 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002732 }
Roland Levillaincff13742014-11-17 14:32:17 +00002733 break;
2734
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002735 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002736 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002737 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002738 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002739 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002740 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002741 case DataType::Type::kInt16:
2742 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002743 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002744 locations->SetOut(Location::RequiresFpuRegister());
2745 break;
2746
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002747 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002748 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002749 locations->SetOut(Location::RequiresFpuRegister());
2750 break;
2751
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002752 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04002753 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002754 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002755 break;
2756
2757 default:
2758 LOG(FATAL) << "Unexpected type conversion from " << input_type
2759 << " to " << result_type;
2760 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002761 break;
2762
2763 default:
2764 LOG(FATAL) << "Unexpected type conversion from " << input_type
2765 << " to " << result_type;
2766 }
2767}
2768
2769void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2770 LocationSummary* locations = conversion->GetLocations();
2771 Location out = locations->Out();
2772 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002773 DataType::Type result_type = conversion->GetResultType();
2774 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002775 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2776 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002777 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002778 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002779 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002780 case DataType::Type::kInt8:
2781 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002782 case DataType::Type::kInt16:
2783 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002784 case DataType::Type::kInt64:
2785 if (in.IsRegister()) {
2786 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2787 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2788 __ movzxb(out.AsRegister<CpuRegister>(),
2789 Address(CpuRegister(RSP), in.GetStackIndex()));
2790 } else {
2791 __ movl(out.AsRegister<CpuRegister>(),
2792 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2793 }
2794 break;
2795
2796 default:
2797 LOG(FATAL) << "Unexpected type conversion from " << input_type
2798 << " to " << result_type;
2799 }
2800 break;
2801
2802 case DataType::Type::kInt8:
2803 switch (input_type) {
2804 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002805 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002806 case DataType::Type::kInt16:
2807 case DataType::Type::kInt32:
2808 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002809 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002810 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002811 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002812 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002813 Address(CpuRegister(RSP), in.GetStackIndex()));
2814 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002815 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002816 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002817 }
2818 break;
2819
2820 default:
2821 LOG(FATAL) << "Unexpected type conversion from " << input_type
2822 << " to " << result_type;
2823 }
2824 break;
2825
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002826 case DataType::Type::kUint16:
2827 switch (input_type) {
2828 case DataType::Type::kInt8:
2829 case DataType::Type::kInt16:
2830 case DataType::Type::kInt32:
2831 case DataType::Type::kInt64:
2832 if (in.IsRegister()) {
2833 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2834 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2835 __ movzxw(out.AsRegister<CpuRegister>(),
2836 Address(CpuRegister(RSP), in.GetStackIndex()));
2837 } else {
2838 __ movl(out.AsRegister<CpuRegister>(),
2839 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2840 }
2841 break;
2842
2843 default:
2844 LOG(FATAL) << "Unexpected type conversion from " << input_type
2845 << " to " << result_type;
2846 }
2847 break;
2848
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002849 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002850 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002851 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002852 case DataType::Type::kInt32:
2853 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002854 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002855 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002856 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002857 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002858 Address(CpuRegister(RSP), in.GetStackIndex()));
2859 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002860 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002861 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002862 }
2863 break;
2864
2865 default:
2866 LOG(FATAL) << "Unexpected type conversion from " << input_type
2867 << " to " << result_type;
2868 }
2869 break;
2870
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002871 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002872 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002873 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002874 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002875 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002876 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002877 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002878 Address(CpuRegister(RSP), in.GetStackIndex()));
2879 } else {
2880 DCHECK(in.IsConstant());
2881 DCHECK(in.GetConstant()->IsLongConstant());
2882 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002883 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002884 }
2885 break;
2886
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002887 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002888 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2889 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002890 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002891
2892 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002893 // if input >= (float)INT_MAX goto done
2894 __ comiss(input, codegen_->LiteralFloatAddress(kPrimIntMax));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002895 __ j(kAboveEqual, &done);
2896 // if input == NaN goto nan
2897 __ j(kUnordered, &nan);
2898 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002899 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002900 __ jmp(&done);
2901 __ Bind(&nan);
2902 // output = 0
2903 __ xorl(output, output);
2904 __ Bind(&done);
2905 break;
2906 }
2907
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002908 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002909 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2910 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002911 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002912
2913 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002914 // if input >= (double)INT_MAX goto done
2915 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002916 __ j(kAboveEqual, &done);
2917 // if input == NaN goto nan
2918 __ j(kUnordered, &nan);
2919 // output = double-to-int-truncate(input)
2920 __ cvttsd2si(output, input);
2921 __ jmp(&done);
2922 __ Bind(&nan);
2923 // output = 0
2924 __ xorl(output, output);
2925 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002926 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002927 }
Roland Levillain946e1432014-11-11 17:35:19 +00002928
2929 default:
2930 LOG(FATAL) << "Unexpected type conversion from " << input_type
2931 << " to " << result_type;
2932 }
2933 break;
2934
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002935 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002936 switch (input_type) {
2937 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002938 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002939 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002940 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002941 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002942 case DataType::Type::kInt16:
2943 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002944 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002945 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002946 break;
2947
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002948 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00002949 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2950 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002951 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002952
Mark Mendell92e83bf2015-05-07 11:25:03 -04002953 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002954 // if input >= (float)LONG_MAX goto done
2955 __ comiss(input, codegen_->LiteralFloatAddress(kPrimLongMax));
Roland Levillain624279f2014-12-04 11:54:28 +00002956 __ j(kAboveEqual, &done);
2957 // if input == NaN goto nan
2958 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002959 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002960 __ cvttss2si(output, input, true);
2961 __ jmp(&done);
2962 __ Bind(&nan);
2963 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002964 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00002965 __ Bind(&done);
2966 break;
2967 }
2968
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002969 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002970 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2971 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002972 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002973
Mark Mendell92e83bf2015-05-07 11:25:03 -04002974 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002975 // if input >= (double)LONG_MAX goto done
2976 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimLongMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002977 __ j(kAboveEqual, &done);
2978 // if input == NaN goto nan
2979 __ j(kUnordered, &nan);
2980 // output = double-to-long-truncate(input)
2981 __ cvttsd2si(output, input, true);
2982 __ jmp(&done);
2983 __ Bind(&nan);
2984 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04002985 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002986 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00002987 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002988 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002989
2990 default:
2991 LOG(FATAL) << "Unexpected type conversion from " << input_type
2992 << " to " << result_type;
2993 }
2994 break;
2995
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002996 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002997 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002998 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002999 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003000 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003001 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003002 case DataType::Type::kInt16:
3003 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003004 if (in.IsRegister()) {
3005 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3006 } else if (in.IsConstant()) {
3007 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3008 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003009 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003010 } else {
3011 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3012 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3013 }
Roland Levillaincff13742014-11-17 14:32:17 +00003014 break;
3015
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003016 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003017 if (in.IsRegister()) {
3018 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3019 } else if (in.IsConstant()) {
3020 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3021 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003022 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003023 } else {
3024 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3025 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3026 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003027 break;
3028
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003029 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003030 if (in.IsFpuRegister()) {
3031 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3032 } else if (in.IsConstant()) {
3033 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3034 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003035 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003036 } else {
3037 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3038 Address(CpuRegister(RSP), in.GetStackIndex()));
3039 }
Roland Levillaincff13742014-11-17 14:32:17 +00003040 break;
3041
3042 default:
3043 LOG(FATAL) << "Unexpected type conversion from " << input_type
3044 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003045 }
Roland Levillaincff13742014-11-17 14:32:17 +00003046 break;
3047
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003048 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003049 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003050 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003051 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003052 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003053 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003054 case DataType::Type::kInt16:
3055 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003056 if (in.IsRegister()) {
3057 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3058 } else if (in.IsConstant()) {
3059 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3060 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003061 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003062 } else {
3063 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3064 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3065 }
Roland Levillaincff13742014-11-17 14:32:17 +00003066 break;
3067
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003068 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003069 if (in.IsRegister()) {
3070 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3071 } else if (in.IsConstant()) {
3072 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3073 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003074 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003075 } else {
3076 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3077 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3078 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003079 break;
3080
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003081 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003082 if (in.IsFpuRegister()) {
3083 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3084 } else if (in.IsConstant()) {
3085 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3086 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003087 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003088 } else {
3089 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3090 Address(CpuRegister(RSP), in.GetStackIndex()));
3091 }
Roland Levillaincff13742014-11-17 14:32:17 +00003092 break;
3093
3094 default:
3095 LOG(FATAL) << "Unexpected type conversion from " << input_type
3096 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003097 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003098 break;
3099
3100 default:
3101 LOG(FATAL) << "Unexpected type conversion from " << input_type
3102 << " to " << result_type;
3103 }
3104}
3105
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003106void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003107 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003108 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003109 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003110 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003111 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003112 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3113 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003114 break;
3115 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003116
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003117 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003118 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003119 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003120 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003121 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003122 break;
3123 }
3124
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003125 case DataType::Type::kFloat64:
3126 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003127 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003128 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003129 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003130 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003131 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003132
3133 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003134 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003135 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003136}
3137
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  // Emits the addition. For integer adds where the output register differs
  // from both inputs, a lea is used so no input is clobbered; otherwise a
  // plain add targets whichever input coincides with the output.
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out aliases first: out += second.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out aliases second: addition is commutative, so out += first.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // Distinct output: lea computes first + second without clobbering.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out aliases first: add the immediate in place.
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // Distinct output: lea with a displacement avoids a separate move.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: only the two-operand form exists, so out == first.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // Commutative: reuse the second-input register as destination.
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        // The locations builder only allows constants that fit in 32 bits
        // (RegisterOrInt32Constant); verify before emitting the immediate form.
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // SSE add: destination aliases the first input (see locations builder).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant-literal pool.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      // Same structure as kFloat32, with the double-precision instruction.
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3229
3230void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003231 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003232 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003233 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003234 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003235 locations->SetInAt(0, Location::RequiresRegister());
3236 locations->SetInAt(1, Location::Any());
3237 locations->SetOut(Location::SameAsFirstInput());
3238 break;
3239 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003240 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003241 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003242 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003243 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003244 break;
3245 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003246 case DataType::Type::kFloat32:
3247 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003248 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003249 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003250 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003251 break;
Calin Juravle11351682014-10-23 15:38:15 +01003252 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003253 default:
Calin Juravle11351682014-10-23 15:38:15 +01003254 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003255 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003256}
3257
// Emits the x86-64 subtraction for an HSub. The two-operand x86 encodings
// require out == first input; the builder guarantees this (checked below).
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        // Right-hand side was left in a stack slot by the register allocator.
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsConstant()) {
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        // Guaranteed by RegisterOrInt32Constant in the locations builder.
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area and addressed via a literal.
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is materialized in the constant area and addressed via a literal.
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3320
Calin Juravle34bacdf2014-10-07 20:23:36 +01003321void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3322 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003323 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003324 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003325 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003326 locations->SetInAt(0, Location::RequiresRegister());
3327 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003328 if (mul->InputAt(1)->IsIntConstant()) {
3329 // Can use 3 operand multiply.
3330 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3331 } else {
3332 locations->SetOut(Location::SameAsFirstInput());
3333 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003334 break;
3335 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003336 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003337 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003338 locations->SetInAt(1, Location::Any());
3339 if (mul->InputAt(1)->IsLongConstant() &&
3340 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003341 // Can use 3 operand multiply.
3342 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3343 } else {
3344 locations->SetOut(Location::SameAsFirstInput());
3345 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003346 break;
3347 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003348 case DataType::Type::kFloat32:
3349 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003350 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003351 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003352 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003353 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003354 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003355
3356 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003357 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003358 }
3359}
3360
// Emits the x86-64 multiplication for an HMul. Integer constants use the
// three-operand imul form (out may differ from first); all other cases use
// the two-operand form, which requires out == first (checked per branch).
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Fits the imulq immediate encoding.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand lives in the constant area.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand lives in the constant area.
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3444
// Pushes `source` onto the x87 FP stack (used by GenerateRemFP, which needs
// fprem). `temp_offset` is an RSP-relative scratch slot to spill through when
// the value is not already in memory; `stack_adjustment` compensates stack
// indices for the temporary space the caller reserved below RSP.
void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
                                                     uint32_t stack_adjustment, bool is_float) {
  if (source.IsStackSlot()) {
    DCHECK(is_float);
    __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else if (source.IsDoubleStackSlot()) {
    DCHECK(!is_float);
    __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else {
    // Write the value to the temporary location on the stack and load to FP stack.
    if (is_float) {
      Location stack_temp = Location::StackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ flds(Address(CpuRegister(RSP), temp_offset));
    } else {
      Location stack_temp = Location::DoubleStackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ fldl(Address(CpuRegister(RSP), temp_offset));
    }
  }
}
3466
// Generates floating-point remainder via the x87 fprem instruction (SSE has
// no remainder). fprem performs partial argument reduction, so it is run in a
// loop until the FPU status word reports the reduction is complete.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // fprem computes ST(0) % ST(1), so the divisor must be pushed first.
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3519
// Fast path for integer div/rem by +1 or -1: rem is always 0; div is a copy,
// negated when the divisor is -1. Avoids the general idiv sequence entirely.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32: {
      if (instruction->IsRem()) {
        // x % (+/-)1 == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          __ negl(output_register);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (instruction->IsRem()) {
        // 32-bit xor zero-extends to the full 64-bit register, with a shorter encoding.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
// Fast path for integer rem by a (possibly negative) power-of-two constant,
// avoiding idiv. Masks out the low bits, then fixes up the sign for negative
// numerators so the result matches truncated (round-toward-zero) division.
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    // out = numerator & (abs_imm - 1); zero means no sign fix-up is needed.
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    __ j(Condition::kZero, &done);
    // tmp = out - abs_imm (the negative-numerator result candidate).
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    // Select tmp when the numerator is negative.
    __ testl(numerator, numerator);
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // 64-bit masks may not fit an immediate; materialize abs_imm - 1 in tmp.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // For a negative numerator, OR in the sign-extended high bits:
    // tmp = (numerator >> 63) << log2(abs_imm), i.e. -abs_imm or 0.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
// Fast path for integer div by a (possibly negative) power-of-two constant.
// Implements truncated division as an arithmetic shift after biasing negative
// numerators by (abs_imm - 1), then negates for negative divisors.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator + (abs_imm - 1), kept only when the numerator is negative.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // For Int64 the temp is RDX (allocated by the locations builder).
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    if (abs_imm == 2) {
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      // 64-bit bias may not fit an immediate; materialize abs_imm - 1 first.
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3652
// Generates integer div/rem by an arbitrary non-trivial constant using the
// "magic number" multiplication technique (Hacker's Delight, ch. 10): the
// quotient is derived from the high half of (magic * numerator), adjusted by
// a shift and a sign correction; rem additionally computes n - q * imm.
// Register contract (established by the locations builder and checked below):
// input in RAX, imul results land in RDX:RAX, output in RAX (div) / RDX (rem),
// and an extra temp preserves the original numerator.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator; EAX is about to be clobbered by the magic constant.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Sign corrections for the high half (see Hacker's Delight).
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // rem = numerator - quotient * imm.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        // Constant too wide for an immediate; use the constant area.
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3763
// Common code for integer HDiv/HRem. Constant divisors are dispatched to the
// specialized fast paths; register divisors use idiv with a slow path guarding
// the single trapping case (MIN_VALUE / -1 raises #DE on x86).
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv fixes the dividend in RAX and the quotient/remainder in RAX/RDX.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    // Divisor in a register: take a slow path when it is -1 to avoid the
    // hardware exception on MIN_VALUE / -1.
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3823
Calin Juravle7c4954d2014-10-28 16:57:40 +00003824void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
3825 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003826 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003827 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003828 case DataType::Type::kInt32:
3829 case DataType::Type::kInt64: {
Calin Juravled0d48522014-11-04 16:40:20 +00003830 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003831 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003832 locations->SetOut(Location::SameAsFirstInput());
3833 // Intel uses edx:eax as the dividend.
3834 locations->AddTemp(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003835 // We need to save the numerator while we tweak rax and rdx. As we are using imul in a way
3836 // which enforces results to be in RAX and RDX, things are simpler if we use RDX also as
3837 // output and request another temp.
3838 if (div->InputAt(1)->IsConstant()) {
3839 locations->AddTemp(Location::RequiresRegister());
3840 }
Calin Juravled0d48522014-11-04 16:40:20 +00003841 break;
3842 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003843
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003844 case DataType::Type::kFloat32:
3845 case DataType::Type::kFloat64: {
Calin Juravle7c4954d2014-10-28 16:57:40 +00003846 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003847 locations->SetInAt(1, Location::Any());
Calin Juravle7c4954d2014-10-28 16:57:40 +00003848 locations->SetOut(Location::SameAsFirstInput());
3849 break;
3850 }
3851
3852 default:
3853 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3854 }
3855}
3856
// Emits the division for an HDiv. Integer types go through the shared
// div/rem generator; floating point uses divss/divsd, which require
// out == first input (checked below).
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand lives in the constant area.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand lives in the constant area.
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3905
Calin Juravlebacfec32014-11-14 15:54:36 +00003906void LocationsBuilderX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003907 DataType::Type type = rem->GetResultType();
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003908 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003909 new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003910
3911 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003912 case DataType::Type::kInt32:
3913 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003914 locations->SetInAt(0, Location::RegisterLocation(RAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003915 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003916 // Intel uses rdx:rax as the dividend and puts the remainder in rdx
3917 locations->SetOut(Location::RegisterLocation(RDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003918 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3919 // which enforces results to be in RAX and RDX, things are simpler if we use EAX also as
3920 // output and request another temp.
3921 if (rem->InputAt(1)->IsConstant()) {
3922 locations->AddTemp(Location::RequiresRegister());
3923 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003924 break;
3925 }
3926
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003927 case DataType::Type::kFloat32:
3928 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003929 locations->SetInAt(0, Location::Any());
3930 locations->SetInAt(1, Location::Any());
3931 locations->SetOut(Location::RequiresFpuRegister());
3932 locations->AddTemp(Location::RegisterLocation(RAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003933 break;
3934 }
3935
3936 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003937 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003938 }
3939}
3940
3941void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003942 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003943 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003944 case DataType::Type::kInt32:
3945 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003946 GenerateDivRemIntegral(rem);
3947 break;
3948 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003949 case DataType::Type::kFloat32:
3950 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003951 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003952 break;
3953 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003954 default:
3955 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3956 }
3957}
3958
Aart Bik1f8d51b2018-02-15 10:42:37 -08003959static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
3960 LocationSummary* locations = new (allocator) LocationSummary(minmax);
3961 switch (minmax->GetResultType()) {
3962 case DataType::Type::kInt32:
3963 case DataType::Type::kInt64:
3964 locations->SetInAt(0, Location::RequiresRegister());
3965 locations->SetInAt(1, Location::RequiresRegister());
3966 locations->SetOut(Location::SameAsFirstInput());
3967 break;
3968 case DataType::Type::kFloat32:
3969 case DataType::Type::kFloat64:
3970 locations->SetInAt(0, Location::RequiresFpuRegister());
3971 locations->SetInAt(1, Location::RequiresFpuRegister());
3972 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
3973 // the second input to be the output (we can simply swap inputs).
3974 locations->SetOut(Location::SameAsFirstInput());
3975 break;
3976 default:
3977 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
3978 }
3979}
3980
Aart Bik351df3e2018-03-07 11:54:57 -08003981void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
3982 bool is_min,
3983 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08003984 Location op1_loc = locations->InAt(0);
3985 Location op2_loc = locations->InAt(1);
3986
3987 // Shortcut for same input locations.
3988 if (op1_loc.Equals(op2_loc)) {
3989 // Can return immediately, as op1_loc == out_loc.
3990 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
3991 // a copy here.
3992 DCHECK(locations->Out().Equals(op1_loc));
3993 return;
3994 }
3995
3996 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
3997 CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();
3998
3999 // (out := op1)
4000 // out <=? op2
4001 // if out is min jmp done
4002 // out := op2
4003 // done:
4004
4005 if (type == DataType::Type::kInt64) {
4006 __ cmpq(out, op2);
4007 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
4008 } else {
4009 DCHECK_EQ(type, DataType::Type::kInt32);
4010 __ cmpl(out, op2);
4011 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
4012 }
4013}
4014
// Emits floating-point min/max with Java semantics: NaN propagates (as the
// canonical quiet NaN) and -0.0/+0.0 are distinguished (min picks -0.0,
// max picks +0.0), which plain minss/maxss-style comparisons do not give us.
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations: out aliases op1, which is the answer.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if Nan jmp Nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  // Nan_label:
  //  out := NaN
  // op2_label:
  //  out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomis* reports an unordered result (either operand NaN) via PF.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0: the operands compared equal, so they are +/-0. ORing the
  // bit patterns keeps a set sign bit (min yields -0.0); ANDing clears it
  // unless both are negative (max yields +0.0).
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: load the canonical quiet-NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4097
Aart Bik351df3e2018-03-07 11:54:57 -08004098void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4099 DataType::Type type = minmax->GetResultType();
4100 switch (type) {
4101 case DataType::Type::kInt32:
4102 case DataType::Type::kInt64:
4103 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4104 break;
4105 case DataType::Type::kFloat32:
4106 case DataType::Type::kFloat64:
4107 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4108 break;
4109 default:
4110 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4111 }
4112}
4113
Aart Bik1f8d51b2018-02-15 10:42:37 -08004114void LocationsBuilderX86_64::VisitMin(HMin* min) {
4115 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
4116}
4117
4118void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
Aart Bik351df3e2018-03-07 11:54:57 -08004119 GenerateMinMax(min, /*is_min*/ true);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004120}
4121
4122void LocationsBuilderX86_64::VisitMax(HMax* max) {
4123 CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
4124}
4125
4126void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
Aart Bik351df3e2018-03-07 11:54:57 -08004127 GenerateMinMax(max, /*is_min*/ false);
Aart Bik1f8d51b2018-02-15 10:42:37 -08004128}
4129
Aart Bik3dad3412018-02-28 12:01:46 -08004130void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
4131 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4132 switch (abs->GetResultType()) {
4133 case DataType::Type::kInt32:
4134 case DataType::Type::kInt64:
4135 locations->SetInAt(0, Location::RequiresRegister());
4136 locations->SetOut(Location::SameAsFirstInput());
4137 locations->AddTemp(Location::RequiresRegister());
4138 break;
4139 case DataType::Type::kFloat32:
4140 case DataType::Type::kFloat64:
4141 locations->SetInAt(0, Location::RequiresFpuRegister());
4142 locations->SetOut(Location::SameAsFirstInput());
4143 locations->AddTemp(Location::RequiresFpuRegister());
4144 break;
4145 default:
4146 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4147 }
4148}
4149
4150void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
4151 LocationSummary* locations = abs->GetLocations();
4152 switch (abs->GetResultType()) {
4153 case DataType::Type::kInt32: {
4154 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4155 CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
4156 // Create mask.
4157 __ movl(mask, out);
4158 __ sarl(mask, Immediate(31));
4159 // Add mask.
4160 __ addl(out, mask);
4161 __ xorl(out, mask);
4162 break;
4163 }
4164 case DataType::Type::kInt64: {
4165 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4166 CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
4167 // Create mask.
4168 __ movq(mask, out);
4169 __ sarq(mask, Immediate(63));
4170 // Add mask.
4171 __ addq(out, mask);
4172 __ xorq(out, mask);
4173 break;
4174 }
4175 case DataType::Type::kFloat32: {
4176 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4177 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4178 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
4179 __ andps(out, mask);
4180 break;
4181 }
4182 case DataType::Type::kFloat64: {
4183 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4184 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4185 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
4186 __ andpd(out, mask);
4187 break;
4188 }
4189 default:
4190 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4191 }
4192}
4193
// The divisor may live anywhere (register, stack slot, or constant); the
// check only reads it. The throwing slow path is set up by the codegen helper.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
4198
// Branches to the DivZeroCheck slow path (which raises the error) when the
// divisor is zero; otherwise falls through with no side effects.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    // All sub-32-bit integral types are checked as 32-bit values.
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        // test sets ZF exactly when the register is zero.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        // Constant divisor: the check is resolved statically — either it
        // always throws (unconditional jump) or no code is needed at all.
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        // Statically resolved, as in the 32-bit case.
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4247
Calin Juravle9aec02f2014-11-18 23:06:35 +00004248void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
4249 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4250
4251 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004252 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004253
4254 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004255 case DataType::Type::kInt32:
4256 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004257 locations->SetInAt(0, Location::RequiresRegister());
4258 // The shift count needs to be in CL.
4259 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
4260 locations->SetOut(Location::SameAsFirstInput());
4261 break;
4262 }
4263 default:
4264 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
4265 }
4266}
4267
4268void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
4269 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4270
4271 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004272 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004273 Location second = locations->InAt(1);
4274
4275 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004276 case DataType::Type::kInt32: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004277 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004278 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004279 if (op->IsShl()) {
4280 __ shll(first_reg, second_reg);
4281 } else if (op->IsShr()) {
4282 __ sarl(first_reg, second_reg);
4283 } else {
4284 __ shrl(first_reg, second_reg);
4285 }
4286 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004287 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004288 if (op->IsShl()) {
4289 __ shll(first_reg, imm);
4290 } else if (op->IsShr()) {
4291 __ sarl(first_reg, imm);
4292 } else {
4293 __ shrl(first_reg, imm);
4294 }
4295 }
4296 break;
4297 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004298 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004299 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004300 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004301 if (op->IsShl()) {
4302 __ shlq(first_reg, second_reg);
4303 } else if (op->IsShr()) {
4304 __ sarq(first_reg, second_reg);
4305 } else {
4306 __ shrq(first_reg, second_reg);
4307 }
4308 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004309 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004310 if (op->IsShl()) {
4311 __ shlq(first_reg, imm);
4312 } else if (op->IsShr()) {
4313 __ sarq(first_reg, imm);
4314 } else {
4315 __ shrq(first_reg, imm);
4316 }
4317 }
4318 break;
4319 }
4320 default:
4321 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00004322 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004323 }
4324}
4325
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004326void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4327 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004328 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004329
4330 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004331 case DataType::Type::kInt32:
4332 case DataType::Type::kInt64: {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004333 locations->SetInAt(0, Location::RequiresRegister());
4334 // The shift count needs to be in CL (unless it is a constant).
4335 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4336 locations->SetOut(Location::SameAsFirstInput());
4337 break;
4338 }
4339 default:
4340 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4341 UNREACHABLE();
4342 }
4343}
4344
4345void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
4346 LocationSummary* locations = ror->GetLocations();
4347 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
4348 Location second = locations->InAt(1);
4349
4350 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004351 case DataType::Type::kInt32:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004352 if (second.IsRegister()) {
4353 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4354 __ rorl(first_reg, second_reg);
4355 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004356 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004357 __ rorl(first_reg, imm);
4358 }
4359 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004360 case DataType::Type::kInt64:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004361 if (second.IsRegister()) {
4362 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4363 __ rorq(first_reg, second_reg);
4364 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004365 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004366 __ rorq(first_reg, imm);
4367 }
4368 break;
4369 default:
4370 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4371 UNREACHABLE();
4372 }
4373}
4374
Calin Juravle9aec02f2014-11-18 23:06:35 +00004375void LocationsBuilderX86_64::VisitShl(HShl* shl) {
4376 HandleShift(shl);
4377}
4378
4379void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
4380 HandleShift(shl);
4381}
4382
4383void LocationsBuilderX86_64::VisitShr(HShr* shr) {
4384 HandleShift(shr);
4385}
4386
4387void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
4388 HandleShift(shr);
4389}
4390
4391void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
4392 HandleShift(ushr);
4393}
4394
4395void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
4396 HandleShift(ushr);
4397}
4398
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004399void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004400 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4401 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004402 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07004403 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004404 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004405}
4406
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // Invoke the allocation entrypoint selected when the instruction was built.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  // The runtime call means this method cannot be a leaf.
  DCHECK(!codegen_->IsLeafMethod());
}
4412
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004413void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004414 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4415 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004416 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004417 locations->SetOut(Location::RegisterLocation(RAX));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004418 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4419 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004420}
4421
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  // The runtime call means this method cannot be a leaf.
  DCHECK(!codegen_->IsLeafMethod());
}
4429
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004430void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004431 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004432 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004433 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4434 if (location.IsStackSlot()) {
4435 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4436 } else if (location.IsDoubleStackSlot()) {
4437 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4438 }
4439 locations->SetOut(location);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004440}
4441
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

// The current method is materialized in the fixed method register
// (kMethodRegisterArgument), where the calling convention placed it.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4457
// A class-table lookup (vtable or IMT entry) is just one or two loads off the
// input class object, so it only needs the class in a register and a register
// for the resulting table entry.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
4464
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded in the class object itself: a single load
    // at the entry's offset from the class yields the entry.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // IMT entries live in a separate ImTable: first load the table pointer
    // from the class, then load the element at its offset. The output
    // register doubles as the intermediate holding the table pointer.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4482
// Bitwise not is performed in place: the output must alias the input register.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4489
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004490void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4491 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004492 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4493 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004494 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004495 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004496 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004497 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004498 break;
4499
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004500 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004501 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004502 break;
4503
4504 default:
4505 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4506 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004507}
4508
// Boolean not is performed in place: the output must alias the input register.
void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4515
4516void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004517 LocationSummary* locations = bool_not->GetLocations();
4518 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4519 locations->Out().AsRegister<CpuRegister>().AsRegister());
4520 Location out = locations->Out();
4521 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4522}
4523
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004524void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004525 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004526 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004527 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004528 locations->SetInAt(i, Location::Any());
4529 }
4530 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004531}
4532
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis never generate code on their own; reaching this visitor is a bug.
  LOG(FATAL) << "Unimplemented";
}
4536
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004537void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004538 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004539 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004540 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004541 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4542 */
4543 switch (kind) {
4544 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004545 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004546 break;
4547 }
4548 case MemBarrierKind::kAnyStore:
4549 case MemBarrierKind::kLoadAny:
4550 case MemBarrierKind::kStoreStore: {
4551 // nop
4552 break;
4553 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004554 case MemBarrierKind::kNTStoreStore:
4555 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004556 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004557 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004558 }
4559}
4560
// Sets up locations for instance/static field loads. Reference-typed loads
// may need a read barrier, which runs on a slow path.
void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // NOTE(review): empty custom caller-save set — the Baker read barrier slow
    // path presumably preserves all caller-save registers itself; confirm
    // against the slow-path implementation.
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the move to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}
4586
// Emits the code for an instance or static field load: a single move of the
// field's width from `base + offset` into the output register, plus the
// read-barrier and memory-barrier handling a reference/volatile load needs.
// Note the ordering constraints: an implicit null check must be recorded
// immediately after the (possibly faulting) load, and a volatile load is
// followed by a LoadAny barrier.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  // The field and the instruction may disagree on signedness (e.g. Uint16
  // vs Int16), but never on size.
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    // Unsigned 8-bit: zero-extending load.
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    // Signed 8-bit: sign-extending load.
    case DataType::Type::kInt8: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    // Unsigned 16-bit (char): zero-extending load.
    case DataType::Type::kUint16: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    // Signed 16-bit (short): sign-extending load.
    case DataType::Type::kInt16: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check= */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    // These types never appear as field load types in the HIR.
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4689
4690void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4691 const FieldInfo& field_info) {
4692 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4693
4694 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004695 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004696 DataType::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004697 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004698 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004699 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004700
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004701 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004702 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004703 if (is_volatile) {
4704 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4705 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4706 } else {
4707 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4708 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004709 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004710 if (is_volatile) {
4711 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4712 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4713 } else {
4714 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4715 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004716 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004717 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004718 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004719 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004720 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004721 } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01004722 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004723 locations->AddTemp(Location::RequiresRegister());
4724 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004725}
4726
Calin Juravle52c48962014-12-16 17:02:57 +00004727void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004728 const FieldInfo& field_info,
4729 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004730 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4731
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004732 LocationSummary* locations = instruction->GetLocations();
Calin Juravle52c48962014-12-16 17:02:57 +00004733 CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
4734 Location value = locations->InAt(1);
4735 bool is_volatile = field_info.IsVolatile();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004736 DataType::Type field_type = field_info.GetFieldType();
Calin Juravle52c48962014-12-16 17:02:57 +00004737 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4738
4739 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004740 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004741 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004742
Mark Mendellea5af682015-10-22 17:35:49 -04004743 bool maybe_record_implicit_null_check_done = false;
4744
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004745 switch (field_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004746 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004747 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004748 case DataType::Type::kInt8: {
Mark Mendell40741f32015-04-20 22:10:34 -04004749 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01004750 __ movb(Address(base, offset),
4751 Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
Mark Mendell40741f32015-04-20 22:10:34 -04004752 } else {
4753 __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
4754 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004755 break;
4756 }
4757
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01004758 case DataType::Type::kUint16:
4759 case DataType::Type::kInt16: {
Mark Mendell40741f32015-04-20 22:10:34 -04004760 if (value.IsConstant()) {
Nicolas Geoffray78612082017-07-24 14:18:53 +01004761 __ movw(Address(base, offset),
4762 Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
Mark Mendell40741f32015-04-20 22:10:34 -04004763 } else {
4764 __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
4765 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004766 break;
4767 }
4768
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004769 case DataType::Type::kInt32:
4770 case DataType::Type::kReference: {
Mark Mendell40741f32015-04-20 22:10:34 -04004771 if (value.IsConstant()) {
4772 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004773 // `field_type == DataType::Type::kReference` implies `v == 0`.
4774 DCHECK((field_type != DataType::Type::kReference) || (v == 0));
Roland Levillain4d027112015-07-01 15:41:14 +01004775 // Note: if heap poisoning is enabled, no need to poison
4776 // (negate) `v` if it is a reference, as it would be null.
Roland Levillain06b66d02015-07-01 12:47:25 +01004777 __ movl(Address(base, offset), Immediate(v));
Mark Mendell40741f32015-04-20 22:10:34 -04004778 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004779 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01004780 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4781 __ movl(temp, value.AsRegister<CpuRegister>());
4782 __ PoisonHeapReference(temp);
4783 __ movl(Address(base, offset), temp);
4784 } else {
4785 __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
4786 }
Mark Mendell40741f32015-04-20 22:10:34 -04004787 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004788 break;
4789 }
4790
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004791 case DataType::Type::kInt64: {
Mark Mendell40741f32015-04-20 22:10:34 -04004792 if (value.IsConstant()) {
4793 int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellea5af682015-10-22 17:35:49 -04004794 codegen_->MoveInt64ToAddress(Address(base, offset),
4795 Address(base, offset + sizeof(int32_t)),
4796 v,
4797 instruction);
4798 maybe_record_implicit_null_check_done = true;
Mark Mendell40741f32015-04-20 22:10:34 -04004799 } else {
4800 __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
4801 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004802 break;
4803 }
4804
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004805 case DataType::Type::kFloat32: {
Mark Mendellea5af682015-10-22 17:35:49 -04004806 if (value.IsConstant()) {
4807 int32_t v =
4808 bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
4809 __ movl(Address(base, offset), Immediate(v));
4810 } else {
4811 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4812 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004813 break;
4814 }
4815
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004816 case DataType::Type::kFloat64: {
Mark Mendellea5af682015-10-22 17:35:49 -04004817 if (value.IsConstant()) {
4818 int64_t v =
4819 bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
4820 codegen_->MoveInt64ToAddress(Address(base, offset),
4821 Address(base, offset + sizeof(int32_t)),
4822 v,
4823 instruction);
4824 maybe_record_implicit_null_check_done = true;
4825 } else {
4826 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4827 }
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004828 break;
4829 }
4830
Aart Bik66c158e2018-01-31 12:55:04 -08004831 case DataType::Type::kUint32:
4832 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004833 case DataType::Type::kVoid:
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004834 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004835 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004836 }
Calin Juravle52c48962014-12-16 17:02:57 +00004837
Mark Mendellea5af682015-10-22 17:35:49 -04004838 if (!maybe_record_implicit_null_check_done) {
4839 codegen_->MaybeRecordImplicitNullCheck(instruction);
4840 }
Calin Juravle77520bc2015-01-12 18:45:46 +00004841
4842 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4843 CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
4844 CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004845 codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00004846 }
4847
Calin Juravle52c48962014-12-16 17:02:57 +00004848 if (is_volatile) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004849 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00004850 }
4851}
4852
4853void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4854 HandleFieldSet(instruction, instruction->GetFieldInfo());
4855}
4856
4857void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004858 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004859}
4860
// Instance field loads share the common field-get location setup.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4864
4865void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Calin Juravle52c48962014-12-16 17:02:57 +00004866 HandleFieldGet(instruction, instruction->GetFieldInfo());
4867}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004868
Calin Juravle52c48962014-12-16 17:02:57 +00004869void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4870 HandleFieldGet(instruction);
4871}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004872
Calin Juravle52c48962014-12-16 17:02:57 +00004873void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4874 HandleFieldGet(instruction, instruction->GetFieldInfo());
4875}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004876
Calin Juravle52c48962014-12-16 17:02:57 +00004877void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4878 HandleFieldSet(instruction, instruction->GetFieldInfo());
4879}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004880
Calin Juravle52c48962014-12-16 17:02:57 +00004881void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004882 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004883}
4884
Calin Juravlee460d1d2015-09-29 04:52:17 +01004885void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
4886 HUnresolvedInstanceFieldGet* instruction) {
4887 FieldAccessCallingConventionX86_64 calling_convention;
4888 codegen_->CreateUnresolvedFieldLocationSummary(
4889 instruction, instruction->GetFieldType(), calling_convention);
4890}
4891
4892void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
4893 HUnresolvedInstanceFieldGet* instruction) {
4894 FieldAccessCallingConventionX86_64 calling_convention;
4895 codegen_->GenerateUnresolvedFieldAccess(instruction,
4896 instruction->GetFieldType(),
4897 instruction->GetFieldIndex(),
4898 instruction->GetDexPc(),
4899 calling_convention);
4900}
4901
4902void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
4903 HUnresolvedInstanceFieldSet* instruction) {
4904 FieldAccessCallingConventionX86_64 calling_convention;
4905 codegen_->CreateUnresolvedFieldLocationSummary(
4906 instruction, instruction->GetFieldType(), calling_convention);
4907}
4908
4909void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
4910 HUnresolvedInstanceFieldSet* instruction) {
4911 FieldAccessCallingConventionX86_64 calling_convention;
4912 codegen_->GenerateUnresolvedFieldAccess(instruction,
4913 instruction->GetFieldType(),
4914 instruction->GetFieldIndex(),
4915 instruction->GetDexPc(),
4916 calling_convention);
4917}
4918
4919void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
4920 HUnresolvedStaticFieldGet* instruction) {
4921 FieldAccessCallingConventionX86_64 calling_convention;
4922 codegen_->CreateUnresolvedFieldLocationSummary(
4923 instruction, instruction->GetFieldType(), calling_convention);
4924}
4925
4926void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
4927 HUnresolvedStaticFieldGet* instruction) {
4928 FieldAccessCallingConventionX86_64 calling_convention;
4929 codegen_->GenerateUnresolvedFieldAccess(instruction,
4930 instruction->GetFieldType(),
4931 instruction->GetFieldIndex(),
4932 instruction->GetDexPc(),
4933 calling_convention);
4934}
4935
4936void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
4937 HUnresolvedStaticFieldSet* instruction) {
4938 FieldAccessCallingConventionX86_64 calling_convention;
4939 codegen_->CreateUnresolvedFieldLocationSummary(
4940 instruction, instruction->GetFieldType(), calling_convention);
4941}
4942
4943void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
4944 HUnresolvedStaticFieldSet* instruction) {
4945 FieldAccessCallingConventionX86_64 calling_convention;
4946 codegen_->GenerateUnresolvedFieldAccess(instruction,
4947 instruction->GetFieldType(),
4948 instruction->GetFieldIndex(),
4949 instruction->GetDexPc(),
4950 calling_convention);
4951}
4952
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004953void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004954 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4955 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
4956 ? Location::RequiresRegister()
4957 : Location::Any();
4958 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004959}
4960
Calin Juravle2ae48182016-03-16 14:05:09 +00004961void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4962 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004963 return;
4964 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004965 LocationSummary* locations = instruction->GetLocations();
4966 Location obj = locations->InAt(0);
4967
4968 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00004969 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004970}
4971
Calin Juravle2ae48182016-03-16 14:05:09 +00004972void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004973 SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004974 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004975
4976 LocationSummary* locations = instruction->GetLocations();
4977 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004978
4979 if (obj.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00004980 __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004981 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004982 __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004983 } else {
4984 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00004985 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01004986 __ jmp(slow_path->GetEntryLabel());
4987 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004988 }
4989 __ j(kEqual, slow_path->GetEntryLabel());
4990}
4991
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004992void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004993 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004994}
4995
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004996void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004997 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004998 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004999 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005000 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5001 object_array_get_with_read_barrier
5002 ? LocationSummary::kCallOnSlowPath
5003 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005004 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005005 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005006 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005007 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005008 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005009 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005010 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5011 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005012 // The output overlaps for an object array get when read barriers
5013 // are enabled: we do not want the move to overwrite the array's
5014 // location, as we need it to emit the read barrier.
5015 locations->SetOut(
5016 Location::RequiresRegister(),
5017 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005018 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005019}
5020
// Emits the code for an array element load: a width-appropriate move from
// `obj + data_offset + index * element_size`, with special handling for
// compressed strings, reference read barriers, and implicit null checks.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  switch (type) {
    // Unsigned 8-bit: zero-extending load.
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    // Signed 8-bit: sign-extending load.
    case DataType::Type::kInt8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kUint16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // String.charAt on a possibly-compressed string: test the
        // compression bit in the count field, then load either a byte
        // (compressed) or a 16-bit char (uncompressed).
        // Branch cases into compressed and uncompressed for each index's type.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        __ testb(Address(obj, count_offset), Immediate(1));
        // The count-field access doubles as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    // Signed 16-bit: sign-extending load.
    case DataType::Type::kInt16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case DataType::Type::kInt32: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          // Fold the constant index into the offset for the slow path.
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kFloat32: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kFloat64: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    // These types never appear as array load types in the HIR.
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
5140
5141void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005142 DataType::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005143
5144 bool needs_write_barrier =
5145 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillaine3f43ac2016-01-19 15:07:47 +00005146 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005147
Vladimir Markoca6fff82017-10-03 14:49:14 +01005148 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005149 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01005150 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005151 LocationSummary::kCallOnSlowPath :
5152 LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005153
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005154 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04005155 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005156 if (DataType::IsFloatingPointType(value_type)) {
Mark Mendellea5af682015-10-22 17:35:49 -04005157 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005158 } else {
5159 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5160 }
5161
5162 if (needs_write_barrier) {
5163 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01005164 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005165 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005166 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005167}
5168
// Emits code for an array store. Primitive stores are a single mov of the
// matching width (with an implicit null check recorded on the memory
// access). Reference stores may additionally emit a type check (falling
// back to ArraySetSlowPathX86_64), heap-reference poisoning, and GC card
// marking.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      // We cannot use a NearLabel for `done`, as its range may be too
      // short when Baker read barriers are enabled.
      Label done;
      NearLabel not_null, do_put;
      SlowPathCode* slow_path = nullptr;
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);
        // Storing null never needs a type check; store it and skip over the
        // check (and over the card marking's value test as well).
        if (instruction->GetValueCanBeNull()) {
          __ testl(register_value, register_value);
          __ j(kNotEqual, &not_null);
          __ movl(address, Immediate(0));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ jmp(&done);
          __ Bind(&not_null);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      if (kPoisonHeapReferences) {
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        __ movl(address, temp);
      } else {
        __ movl(address, register_value);
      }
      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Dirty the GC card for `array`, which now holds the new reference.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), instruction->GetValueCanBeNull());
      __ Bind(&done);

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        // NOTE(review): no explicit MaybeRecordImplicitNullCheck on this path;
        // presumably MoveInt64ToAddress records it for the store(s) it emits
        // (it may split the 64-bit immediate across two 32-bit stores, hence
        // `address_high`) -- confirm against its definition.
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        DCHECK(value.IsConstant());
        // Store the float constant via its raw bit pattern.
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the double constant via its raw bit pattern, possibly as
        // two 32-bit halves (see the kInt64 case above).
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5374
5375void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005376 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005377 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005378 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005379 if (!instruction->IsEmittedAtUseSite()) {
5380 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5381 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005382}
5383
5384void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005385 if (instruction->IsEmittedAtUseSite()) {
5386 return;
5387 }
5388
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005389 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005390 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005391 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5392 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005393 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005394 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005395 // Mask out most significant bit in case the array is String's array of char.
5396 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005397 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005398 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005399}
5400
5401void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005402 RegisterSet caller_saves = RegisterSet::Empty();
5403 InvokeRuntimeCallingConvention calling_convention;
5404 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5405 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5406 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005407 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005408 HInstruction* length = instruction->InputAt(1);
5409 if (!length->IsEmittedAtUseSite()) {
5410 locations->SetInAt(1, Location::RegisterOrConstant(length));
5411 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005412}
5413
// Emits the bounds check `0 <= index < length`, jumping to a (throwing)
// BoundsCheckSlowPathX86_64 when it fails. Constant operands are folded:
// with both index and length constant, either an unconditional jump to the
// slow path or no code at all is emitted.
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // The unsigned kAboveEqual comparison also catches a negative index.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // The in-memory length of a compressed String includes the
        // compression flag, so it must be loaded and shifted before the
        // comparison.
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    // The comparison above is `length <op> index`, so take the slow path
    // when length <= index (unsigned, which also catches a negative index).
    codegen_->AddSlowPath(slow_path);
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5475
5476void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
5477 CpuRegister card,
5478 CpuRegister object,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005479 CpuRegister value,
5480 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04005481 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005482 if (value_can_be_null) {
5483 __ testl(value, value);
5484 __ j(kEqual, &is_null);
5485 }
Roland Levillainc73f0522018-08-14 15:16:50 +01005486 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07005487 __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08005488 /* no_rip= */ true));
Roland Levillainc73f0522018-08-14 15:16:50 +01005489 // Calculate the offset (in the card table) of the card corresponding to
5490 // `object`.
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005491 __ movq(temp, object);
5492 __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
Roland Levillainc73f0522018-08-14 15:16:50 +01005493 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
5494 // `object`'s card.
5495 //
5496 // Register `card` contains the address of the card table. Note that the card
5497 // table's base is biased during its creation so that it always starts at an
5498 // address whose least-significant byte is equal to `kCardDirty` (see
5499 // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
5500 // below writes the `kCardDirty` (byte) value into the `object`'s card
5501 // (located at `card + object >> kCardShift`).
5502 //
5503 // This dual use of the value in register `card` (1. to calculate the location
5504 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
5505 // (no need to explicitly load `kCardDirty` as an immediate value).
Roland Levillain4d027112015-07-01 15:41:14 +01005506 __ movb(Address(temp, card, TIMES_1, 0), card);
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005507 if (value_can_be_null) {
5508 __ Bind(&is_null);
5509 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005510}
5511
// Intentionally unimplemented: this visitor is not expected to be reached
// (presumably parallel moves are created with their operand locations
// already assigned, so there is nothing to allocate here -- confirm).
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
5515
5516void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005517 if (instruction->GetNext()->IsSuspendCheck() &&
5518 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5519 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5520 // The back edge will generate the suspend check.
5521 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5522 }
5523
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005524 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5525}
5526
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005527void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005528 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5529 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005530 // In suspend check slow path, usually there are no caller-save registers at all.
5531 // If SIMD instructions are present, however, we force spilling all live SIMD
5532 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005533 locations->SetCustomSlowPathCallerSaves(
5534 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005535}
5536
5537void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005538 HBasicBlock* block = instruction->GetBlock();
5539 if (block->GetLoopInformation() != nullptr) {
5540 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5541 // The back edge will generate the suspend check.
5542 return;
5543 }
5544 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5545 // The goto will generate the suspend check.
5546 return;
5547 }
5548 GenerateSuspendCheck(instruction, nullptr);
5549}
5550
// Emits a suspend check: tests the current thread's flags and transfers
// control to a (lazily created and cached on the instruction)
// SuspendCheckSlowPathX86_64 when any flag is set. With a non-null
// `successor` (a loop header, i.e. a back-edge check), the fast path jumps
// directly to `successor`; otherwise execution falls through after the
// check once the slow path returns.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First time this suspend check is emitted: create the slow path and
    // cache it on the instruction for possible later re-emission.
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the thread flags word (via the GS-based thread register); zero
  // means no suspension (or other flag handling) is requested.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5578
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005579X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
5580 return codegen_->GetAssembler();
5581}
5582
5583void ParallelMoveResolverX86_64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005584 MoveOperands* move = moves_[index];
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005585 Location source = move->GetSource();
5586 Location destination = move->GetDestination();
5587
5588 if (source.IsRegister()) {
5589 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005590 __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005591 } else if (destination.IsStackSlot()) {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005592 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005593 source.AsRegister<CpuRegister>());
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005594 } else {
5595 DCHECK(destination.IsDoubleStackSlot());
5596 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005597 source.AsRegister<CpuRegister>());
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005598 }
5599 } else if (source.IsStackSlot()) {
5600 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005601 __ movl(destination.AsRegister<CpuRegister>(),
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005602 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005603 } else if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005604 __ movss(destination.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005605 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005606 } else {
5607 DCHECK(destination.IsStackSlot());
5608 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5609 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5610 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005611 } else if (source.IsDoubleStackSlot()) {
5612 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005613 __ movq(destination.AsRegister<CpuRegister>(),
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005614 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005615 } else if (destination.IsFpuRegister()) {
Roland Levillain199f3362014-11-27 17:15:16 +00005616 __ movsd(destination.AsFpuRegister<XmmRegister>(),
5617 Address(CpuRegister(RSP), source.GetStackIndex()));
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005618 } else {
Nicolas Geoffrayc8147a72014-10-21 16:06:20 +01005619 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01005620 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5621 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5622 }
Aart Bik5576f372017-03-23 16:17:37 -07005623 } else if (source.IsSIMDStackSlot()) {
Aart Bikcfe50bb2017-12-12 14:54:12 -08005624 if (destination.IsFpuRegister()) {
5625 __ movups(destination.AsFpuRegister<XmmRegister>(),
5626 Address(CpuRegister(RSP), source.GetStackIndex()));
5627 } else {
5628 DCHECK(destination.IsSIMDStackSlot());
5629 size_t high = kX86_64WordSize;
5630 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
5631 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
5632 __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
5633 __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
5634 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005635 } else if (source.IsConstant()) {
5636 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005637 if (constant->IsIntConstant() || constant->IsNullConstant()) {
5638 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005639 if (destination.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005640 if (value == 0) {
5641 __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
5642 } else {
5643 __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
5644 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005645 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005646 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray748f1402015-01-27 08:17:54 +00005647 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005648 }
5649 } else if (constant->IsLongConstant()) {
5650 int64_t value = constant->AsLongConstant()->GetValue();
5651 if (destination.IsRegister()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04005652 codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005653 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005654 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005655 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005656 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005657 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005658 float fp_value = constant->AsFloatConstant()->GetValue();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005659 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005660 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005661 codegen_->Load32BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005662 } else {
5663 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005664 Immediate imm(bit_cast<int32_t, float>(fp_value));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005665 __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
5666 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005667 } else {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005668 DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005669 double fp_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005670 int64_t value = bit_cast<int64_t, double>(fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005671 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005672 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05005673 codegen_->Load64BitValue(dest, fp_value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005674 } else {
5675 DCHECK(destination.IsDoubleStackSlot()) << destination;
Mark Mendellcfa410b2015-05-25 16:02:44 -04005676 codegen_->Store64BitValueToStack(destination, value);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005677 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005678 }
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005679 } else if (source.IsFpuRegister()) {
5680 if (destination.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005681 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005682 } else if (destination.IsStackSlot()) {
5683 __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005684 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005685 } else if (destination.IsDoubleStackSlot()) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005686 __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
Roland Levillain271ab9c2014-11-27 15:23:57 +00005687 source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005688 } else {
5689 DCHECK(destination.IsSIMDStackSlot());
5690 __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
5691 source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005692 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005693 }
5694}
5695
// Swaps the 32-bit value in core register `reg` with the 32-bit stack slot at
// RSP-relative offset `mem`, using the reserved TMP register as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = slot.
  __ movl(Address(CpuRegister(RSP), mem), reg);               // Slot = reg.
  __ movl(reg, CpuRegister(TMP));                             // reg = old slot value.
}
5701
// Swaps the 64-bit contents of two core registers via the reserved TMP register.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5707
// Swaps the 64-bit value in core register `reg` with the 64-bit stack slot at
// RSP-relative offset `mem`, using the reserved TMP register as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = slot.
  __ movq(Address(CpuRegister(RSP), mem), reg);               // Slot = reg.
  __ movq(reg, CpuRegister(TMP));                             // reg = old slot value.
}
5713
// Swaps the low 32 bits of XMM register `reg` with the 32-bit stack slot at
// RSP-relative offset `mem`, staging the slot's value in TMP.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = slot (zero-extended).
  __ movss(Address(CpuRegister(RSP), mem), reg);              // Slot = low float of reg.
  __ movd(reg, CpuRegister(TMP));                             // reg = old slot bits.
}
5719
// Swaps the low 64 bits of XMM register `reg` with the 64-bit stack slot at
// RSP-relative offset `mem`, staging the slot's value in TMP.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));  // TMP = slot.
  __ movsd(Address(CpuRegister(RSP), mem), reg);              // Slot = low double of reg.
  // NOTE(review): relies on the assembler's movd(XmmRegister, CpuRegister)
  // transferring the full 64 bits here — confirm it emits with REX.W.
  __ movd(reg, CpuRegister(TMP));
}
5725
Aart Bikcfe50bb2017-12-12 14:54:12 -08005726void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
5727 size_t extra_slot = 2 * kX86_64WordSize;
5728 __ subq(CpuRegister(RSP), Immediate(extra_slot));
5729 __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
5730 ExchangeMemory64(0, mem + extra_slot, 2);
5731 __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
5732 __ addq(CpuRegister(RSP), Immediate(extra_slot));
5733}
5734
5735void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
5736 ScratchRegisterScope ensure_scratch(
5737 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5738
5739 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5740 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5741 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5742 Address(CpuRegister(RSP), mem2 + stack_offset));
5743 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5744 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5745 CpuRegister(ensure_scratch.GetRegister()));
5746}
5747
5748void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
5749 ScratchRegisterScope ensure_scratch(
5750 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5751
5752 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5753
5754 // Now that temp registers are available (possibly spilled), exchange blocks of memory.
5755 for (int i = 0; i < num_of_qwords; i++) {
5756 __ movq(CpuRegister(TMP),
5757 Address(CpuRegister(RSP), mem1 + stack_offset));
5758 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5759 Address(CpuRegister(RSP), mem2 + stack_offset));
5760 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
5761 CpuRegister(TMP));
5762 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5763 CpuRegister(ensure_scratch.GetRegister()));
5764 stack_offset += kX86_64WordSize;
5765 }
5766}
5767
// Emits code that swaps the source and destination of the parallel move at
// `index`. Dispatches on the pair of location kinds to the Exchange* /
// ExchangeMemory* helpers; any pair not listed is a fatal error.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM: stage one value's low 64 bits in TMP while movaps copies
    // the other register.
    // NOTE(review): only 64 bits travel through TMP — fine for float/double;
    // presumably full-vector swaps take the SIMD stack-slot paths below.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    // 128-bit slots are swapped as two qwords.
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5809
5810
// Spills a core register so the move resolver can use it as scratch.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5814
5815
// Restores a core register previously spilled by SpillScratch().
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5819
// Emits a fast-path check that the class in `class_reg` is initialized and
// branches to `slow_path` otherwise; also binds the slow path's exit label.
// The class status lives in the status_ field above the SubtypeCheckBits, so
// only the byte containing the status needs to be compared (assumes the
// status bits fit entirely within that byte).
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  // Offset of the byte within the object that holds the status bits.
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  // kInitialized shifted to its in-byte position, for a single-byte compare.
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_initialized_value));
  // Any status below kInitialized still needs the runtime initialization path.
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the x86-64 memory model.
}
5833
Vladimir Marko175e7862018-03-27 09:03:13 +00005834void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
5835 CpuRegister temp) {
5836 uint32_t path_to_root = check->GetBitstringPathToRoot();
5837 uint32_t mask = check->GetBitstringMask();
5838 DCHECK(IsPowerOfTwo(mask + 1));
5839 size_t mask_bits = WhichPowerOf2(mask + 1);
5840
5841 if (mask_bits == 16u) {
5842 // Compare the bitstring in memory.
5843 __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
5844 } else {
5845 // /* uint32_t */ temp = temp->status_
5846 __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
5847 // Compare the bitstring bits using SUB.
5848 __ subl(temp, Immediate(path_to_root));
5849 // Shift out bits that do not contribute to the comparison.
5850 __ shll(temp, Immediate(32u - mask_bits));
5851 }
5852}
5853
// Returns the class-load kind this code generator supports for the requested
// kind. x86-64 supports all kinds, so the desired kind is returned unchanged;
// the switch only sanity-checks JIT vs. AOT consistency.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      // PC-relative / .bss kinds are only emitted by the AOT compiler.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      // Direct-address kinds are only valid when JIT compiling.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
5876
// Allocates locations for HLoadClass. A kRuntimeCall load uses the custom
// RAX-in/RAX-out convention; other kinds produce a register output and may
// need a slow path for resolution/initialization or a read barrier.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // The current method is the input for a referrer's-class load.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5911
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005912Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01005913 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005914 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005915 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005916 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005917 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005918 PatchInfo<Label>* info = &jit_class_patches_.back();
5919 return &info->label;
5920}
5921
// Emits the code for HLoadClass according to its load kind: either a direct
// runtime call, or a register load (PC-relative, .bss entry, JIT address or
// root table) optionally followed by a null check and/or clinit check that
// branch to a shared LoadClassSlowPathX86_64.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are loaded without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Emit a dummy PC-relative lea; the linker patches the displacement.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the 32-bit boot image address from the .data.bimg.rel.ro entry.
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // The .bss entry may still be null (class not resolved yet).
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // GenerateClassInitializationCheck binds the slow path's exit label.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6012
// Allocates locations for HClinitCheck: the class to check comes in a
// register, and the output (if used) aliases that input.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6023
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006024void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6025 // Custom calling convention: RAX serves as both input and output.
6026 Location location = Location::RegisterLocation(RAX);
6027 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6028}
6029
// Loading a MethodHandle is always done through a runtime call.
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6033
Orion Hodson18259d72018-04-12 11:18:23 +01006034void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
6035 // Custom calling convention: RAX serves as both input and output.
6036 Location location = Location::RegisterLocation(RAX);
6037 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6038}
6039
// Loading a MethodType is always done through a runtime call.
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6043
// Emits the explicit class-initialization check: creates a slow path that
// initializes the class and emits the fast-path status compare against it.
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
}
6052
// Returns the string-load kind this code generator supports for the requested
// kind. x86-64 supports all kinds, so the desired kind is returned unchanged;
// the switch only sanity-checks JIT vs. AOT consistency.
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      // PC-relative / .bss kinds are only emitted by the AOT compiler.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      // Direct-address kinds are only valid when JIT compiling.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      break;
  }
  return desired_string_load_kind;
}
6070
// Allocates locations for HLoadString. A kRuntimeCall load returns its result
// in RAX; other kinds use any register, and a kBssEntry load with Baker (or
// no) read barrier relies on the SaveEverything slow path instead of
// reserving caller-save registers.
void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
    locations->SetOut(Location::RegisterLocation(RAX));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString to save everything.
        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
6088
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006089Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006090 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006091 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006092 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006093 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006094 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006095 PatchInfo<Label>* info = &jit_string_patches_.back();
6096 return &info->label;
6097}
6098
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006099// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6100// move.
6101void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006102 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006103 Location out_loc = locations->Out();
6104 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006105
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006106 switch (load->GetLoadKind()) {
6107 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006108 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Andreas Gampe3db70682018-12-26 15:12:03 -08006109 __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006110 codegen_->RecordBootImageStringPatch(load);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006111 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006112 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006113 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006114 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Andreas Gampe3db70682018-12-26 15:12:03 -08006115 __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006116 codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006117 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006118 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006119 case HLoadString::LoadKind::kBssEntry: {
6120 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
Andreas Gampe3db70682018-12-26 15:12:03 -08006121 /* no_rip= */ false);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006122 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
6123 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006124 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Marko174b2e22017-10-12 13:34:49 +01006125 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006126 codegen_->AddSlowPath(slow_path);
6127 __ testl(out, out);
6128 __ j(kEqual, slow_path->GetEntryLabel());
6129 __ Bind(slow_path->GetExitLabel());
6130 return;
6131 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006132 case HLoadString::LoadKind::kJitBootImageAddress: {
6133 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
6134 DCHECK_NE(address, 0u);
6135 __ movl(out, Immediate(static_cast<int32_t>(address))); // Zero-extended.
6136 return;
6137 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006138 case HLoadString::LoadKind::kJitTableAddress: {
6139 Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
Andreas Gampe3db70682018-12-26 15:12:03 -08006140 /* no_rip= */ true);
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006141 Label* fixup_label = codegen_->NewJitRootStringPatch(
6142 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006143 // /* GcRoot<mirror::String> */ out = *address
6144 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
6145 return;
6146 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006147 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006148 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006149 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006150
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006151 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006152 // Custom calling convention: RAX serves as both input and output.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006153 __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
Christina Wadsworthabb341b2016-08-31 16:29:44 -07006154 codegen_->InvokeRuntime(kQuickResolveString,
6155 load,
6156 load->GetDexPc());
6157 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006158}
6159
David Brazdilcb1c0552015-08-04 16:22:25 +01006160static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006161 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08006162 /* no_rip= */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01006163}
6164
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006165void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
6166 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006167 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006168 locations->SetOut(Location::RequiresRegister());
6169}
6170
6171void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006172 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
6173}
6174
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, outputs or temps. The result of the placement-new is
  // intentionally discarded; presumably the LocationSummary constructor
  // attaches itself to `clear` — confirm against LocationSummary's ctor.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6178
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Zero the pending-exception slot in the Thread object (GS-relative TLS store).
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6182
6183void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006184 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6185 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006186 InvokeRuntimeCallingConvention calling_convention;
6187 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6188}
6189
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Hand the exception object (already in the first argument register, see
  // the locations builder) to the pDeliverException runtime entry point.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6194
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006195// Temp is used for read barrier.
6196static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6197 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006198 !kUseBakerReadBarrier &&
6199 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006200 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006201 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6202 return 1;
6203 }
6204 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006205}
6206
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006207// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6208// interface pointer, the current interface is compared in memory.
6209// The other checks have one temp for loading the object's class.
6210static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6211 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6212 return 2;
6213 }
6214 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006215}
6216
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006217void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006218 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006219 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006220 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006221 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006222 case TypeCheckKind::kExactCheck:
6223 case TypeCheckKind::kAbstractClassCheck:
6224 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00006225 case TypeCheckKind::kArrayObjectCheck: {
6226 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
6227 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
6228 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006229 break;
Vladimir Marko87584542017-12-12 17:47:52 +00006230 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006231 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006232 case TypeCheckKind::kUnresolvedCheck:
6233 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006234 call_kind = LocationSummary::kCallOnSlowPath;
6235 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006236 case TypeCheckKind::kBitstringCheck:
6237 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006238 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006239
Vladimir Markoca6fff82017-10-03 14:49:14 +01006240 LocationSummary* locations =
6241 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006242 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006243 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006244 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006245 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006246 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6247 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6248 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6249 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
6250 } else {
6251 locations->SetInAt(1, Location::Any());
6252 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006253 // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
6254 locations->SetOut(Location::RequiresRegister());
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006255 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006256}
6257
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006258void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006259 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006260 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006261 Location obj_loc = locations->InAt(0);
6262 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006263 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006264 Location out_loc = locations->Out();
6265 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006266 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6267 DCHECK_LE(num_temps, 1u);
6268 Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006269 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006270 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6271 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6272 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006273 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006274 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006275
6276 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006277 // Avoid null check if we know obj is not null.
6278 if (instruction->MustDoNullCheck()) {
6279 __ testl(obj, obj);
6280 __ j(kEqual, &zero);
6281 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006282
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006283 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006284 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006285 ReadBarrierOption read_barrier_option =
6286 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006287 // /* HeapReference<Class> */ out = obj->klass_
6288 GenerateReferenceLoadTwoRegisters(instruction,
6289 out_loc,
6290 obj_loc,
6291 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006292 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006293 if (cls.IsRegister()) {
6294 __ cmpl(out, cls.AsRegister<CpuRegister>());
6295 } else {
6296 DCHECK(cls.IsStackSlot()) << cls;
6297 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6298 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006299 if (zero.IsLinked()) {
6300 // Classes must be equal for the instanceof to succeed.
6301 __ j(kNotEqual, &zero);
6302 __ movl(out, Immediate(1));
6303 __ jmp(&done);
6304 } else {
6305 __ setcc(kEqual, out);
6306 // setcc only sets the low byte.
6307 __ andl(out, Immediate(1));
6308 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006309 break;
6310 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006311
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006312 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006313 ReadBarrierOption read_barrier_option =
6314 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006315 // /* HeapReference<Class> */ out = obj->klass_
6316 GenerateReferenceLoadTwoRegisters(instruction,
6317 out_loc,
6318 obj_loc,
6319 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006320 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006321 // If the class is abstract, we eagerly fetch the super class of the
6322 // object to avoid doing a comparison we know will fail.
6323 NearLabel loop, success;
6324 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006325 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006326 GenerateReferenceLoadOneRegister(instruction,
6327 out_loc,
6328 super_offset,
6329 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006330 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006331 __ testl(out, out);
6332 // If `out` is null, we use it for the result, and jump to `done`.
6333 __ j(kEqual, &done);
6334 if (cls.IsRegister()) {
6335 __ cmpl(out, cls.AsRegister<CpuRegister>());
6336 } else {
6337 DCHECK(cls.IsStackSlot()) << cls;
6338 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6339 }
6340 __ j(kNotEqual, &loop);
6341 __ movl(out, Immediate(1));
6342 if (zero.IsLinked()) {
6343 __ jmp(&done);
6344 }
6345 break;
6346 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006347
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006348 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006349 ReadBarrierOption read_barrier_option =
6350 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006351 // /* HeapReference<Class> */ out = obj->klass_
6352 GenerateReferenceLoadTwoRegisters(instruction,
6353 out_loc,
6354 obj_loc,
6355 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006356 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006357 // Walk over the class hierarchy to find a match.
6358 NearLabel loop, success;
6359 __ Bind(&loop);
6360 if (cls.IsRegister()) {
6361 __ cmpl(out, cls.AsRegister<CpuRegister>());
6362 } else {
6363 DCHECK(cls.IsStackSlot()) << cls;
6364 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6365 }
6366 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006367 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006368 GenerateReferenceLoadOneRegister(instruction,
6369 out_loc,
6370 super_offset,
6371 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006372 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006373 __ testl(out, out);
6374 __ j(kNotEqual, &loop);
6375 // If `out` is null, we use it for the result, and jump to `done`.
6376 __ jmp(&done);
6377 __ Bind(&success);
6378 __ movl(out, Immediate(1));
6379 if (zero.IsLinked()) {
6380 __ jmp(&done);
6381 }
6382 break;
6383 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006384
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006385 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00006386 ReadBarrierOption read_barrier_option =
6387 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006388 // /* HeapReference<Class> */ out = obj->klass_
6389 GenerateReferenceLoadTwoRegisters(instruction,
6390 out_loc,
6391 obj_loc,
6392 class_offset,
Vladimir Marko87584542017-12-12 17:47:52 +00006393 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006394 // Do an exact check.
6395 NearLabel exact_check;
6396 if (cls.IsRegister()) {
6397 __ cmpl(out, cls.AsRegister<CpuRegister>());
6398 } else {
6399 DCHECK(cls.IsStackSlot()) << cls;
6400 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6401 }
6402 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006403 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006404 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006405 GenerateReferenceLoadOneRegister(instruction,
6406 out_loc,
6407 component_offset,
6408 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00006409 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006410 __ testl(out, out);
6411 // If `out` is null, we use it for the result, and jump to `done`.
6412 __ j(kEqual, &done);
6413 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6414 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006415 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006416 __ movl(out, Immediate(1));
6417 __ jmp(&done);
6418 break;
6419 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006420
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006421 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006422 // No read barrier since the slow path will retry upon failure.
6423 // /* HeapReference<Class> */ out = obj->klass_
6424 GenerateReferenceLoadTwoRegisters(instruction,
6425 out_loc,
6426 obj_loc,
6427 class_offset,
6428 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006429 if (cls.IsRegister()) {
6430 __ cmpl(out, cls.AsRegister<CpuRegister>());
6431 } else {
6432 DCHECK(cls.IsStackSlot()) << cls;
6433 __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
6434 }
6435 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006436 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08006437 instruction, /* is_fatal= */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006438 codegen_->AddSlowPath(slow_path);
6439 __ j(kNotEqual, slow_path->GetEntryLabel());
6440 __ movl(out, Immediate(1));
6441 if (zero.IsLinked()) {
6442 __ jmp(&done);
6443 }
6444 break;
6445 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006446
Calin Juravle98893e12015-10-02 21:05:03 +01006447 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006448 case TypeCheckKind::kInterfaceCheck: {
6449 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006450 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006451 // cases.
6452 //
6453 // We cannot directly call the InstanceofNonTrivial runtime
6454 // entry point without resorting to a type checking slow path
6455 // here (i.e. by calling InvokeRuntime directly), as it would
6456 // require to assign fixed registers for the inputs of this
6457 // HInstanceOf instruction (following the runtime calling
6458 // convention), which might be cluttered by the potential first
6459 // read barrier emission at the beginning of this method.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006460 //
6461 // TODO: Introduce a new runtime entry point taking the object
6462 // to test (instead of its class) as argument, and let it deal
6463 // with the read barrier issues. This will let us refactor this
6464 // case of the `switch` code as it was previously (with a direct
6465 // call to the runtime not using a type checking slow path).
6466 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006467 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006468 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08006469 instruction, /* is_fatal= */ false);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006470 codegen_->AddSlowPath(slow_path);
6471 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006472 if (zero.IsLinked()) {
6473 __ jmp(&done);
6474 }
6475 break;
6476 }
Vladimir Marko175e7862018-03-27 09:03:13 +00006477
6478 case TypeCheckKind::kBitstringCheck: {
6479 // /* HeapReference<Class> */ temp = obj->klass_
6480 GenerateReferenceLoadTwoRegisters(instruction,
6481 out_loc,
6482 obj_loc,
6483 class_offset,
6484 kWithoutReadBarrier);
6485
6486 GenerateBitstringTypeCheckCompare(instruction, out);
6487 if (zero.IsLinked()) {
6488 __ j(kNotEqual, &zero);
6489 __ movl(out, Immediate(1));
6490 __ jmp(&done);
6491 } else {
6492 __ setcc(kEqual, out);
6493 // setcc only sets the low byte.
6494 __ andl(out, Immediate(1));
6495 }
6496 break;
6497 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006498 }
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006499
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006500 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006501 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006502 __ xorl(out, out);
6503 }
6504
6505 if (done.IsLinked()) {
6506 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006507 }
6508
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006509 if (slow_path != nullptr) {
6510 __ Bind(slow_path->GetExitLabel());
6511 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006512}
6513
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006514void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
Andreas Gampeb5f3d812016-11-04 19:25:20 -07006515 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00006516 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006517 LocationSummary* locations =
6518 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006519 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006520 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6521 // Require a register for the interface check since there is a loop that compares the class to
6522 // a memory address.
6523 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00006524 } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
6525 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
6526 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
6527 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006528 } else {
6529 locations->SetInAt(1, Location::Any());
6530 }
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006531 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
6532 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006533}
6534
6535void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006536 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006537 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006538 Location obj_loc = locations->InAt(0);
6539 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006540 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006541 Location temp_loc = locations->GetTemp(0);
6542 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006543 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6544 DCHECK_GE(num_temps, 1u);
6545 DCHECK_LE(num_temps, 2u);
6546 Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006547 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6548 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6549 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6550 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6551 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6552 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006553 const uint32_t object_array_data_offset =
6554 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006555
Vladimir Marko87584542017-12-12 17:47:52 +00006556 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006557 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006558 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6559 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006560 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006561
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006562
6563 NearLabel done;
6564 // Avoid null check if we know obj is not null.
6565 if (instruction->MustDoNullCheck()) {
6566 __ testl(obj, obj);
6567 __ j(kEqual, &done);
6568 }
6569
Roland Levillain0d5a2812015-11-13 10:07:31 +00006570 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006571 case TypeCheckKind::kExactCheck:
6572 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006573 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006574 GenerateReferenceLoadTwoRegisters(instruction,
6575 temp_loc,
6576 obj_loc,
6577 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006578 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006579 if (cls.IsRegister()) {
6580 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6581 } else {
6582 DCHECK(cls.IsStackSlot()) << cls;
6583 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6584 }
6585 // Jump to slow path for throwing the exception or doing a
6586 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006587 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006588 break;
6589 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006590
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006591 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006592 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006593 GenerateReferenceLoadTwoRegisters(instruction,
6594 temp_loc,
6595 obj_loc,
6596 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006597 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006598 // If the class is abstract, we eagerly fetch the super class of the
6599 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006600 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006601 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006602 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006603 GenerateReferenceLoadOneRegister(instruction,
6604 temp_loc,
6605 super_offset,
6606 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006607 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006608
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006609 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6610 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006611 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006612 // Otherwise, compare the classes.
6613 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006614 if (cls.IsRegister()) {
6615 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6616 } else {
6617 DCHECK(cls.IsStackSlot()) << cls;
6618 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6619 }
6620 __ j(kNotEqual, &loop);
6621 break;
6622 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006623
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006624 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006625 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006626 GenerateReferenceLoadTwoRegisters(instruction,
6627 temp_loc,
6628 obj_loc,
6629 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006630 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006631 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006632 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006633 __ Bind(&loop);
6634 if (cls.IsRegister()) {
6635 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6636 } else {
6637 DCHECK(cls.IsStackSlot()) << cls;
6638 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6639 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006640 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006641
Roland Levillain0d5a2812015-11-13 10:07:31 +00006642 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006643 GenerateReferenceLoadOneRegister(instruction,
6644 temp_loc,
6645 super_offset,
6646 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006647 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006648
6649 // If the class reference currently in `temp` is not null, jump
6650 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006651 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006652 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006653 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006654 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006655 break;
6656 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006657
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006658 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006659 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006660 GenerateReferenceLoadTwoRegisters(instruction,
6661 temp_loc,
6662 obj_loc,
6663 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006664 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006665 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006666 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006667 if (cls.IsRegister()) {
6668 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6669 } else {
6670 DCHECK(cls.IsStackSlot()) << cls;
6671 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6672 }
6673 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006674
6675 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006676 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006677 GenerateReferenceLoadOneRegister(instruction,
6678 temp_loc,
6679 component_offset,
6680 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006681 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006682
6683 // If the component type is not null (i.e. the object is indeed
6684 // an array), jump to label `check_non_primitive_component_type`
6685 // to further check that this component type is not a primitive
6686 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006687 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006688 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006689 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006690 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006691 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006692 break;
6693 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006694
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006695 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006696 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006697 //
6698 // We cannot directly call the CheckCast runtime entry point
6699 // without resorting to a type checking slow path here (i.e. by
6700 // calling InvokeRuntime directly), as it would require to
6701 // assign fixed registers for the inputs of this HInstanceOf
6702 // instruction (following the runtime calling convention), which
6703 // might be cluttered by the potential first read barrier
6704 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006705 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006706 break;
6707 }
6708
Vladimir Marko175e7862018-03-27 09:03:13 +00006709 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006710 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
6711 // We can not get false positives by doing this.
6712 // /* HeapReference<Class> */ temp = obj->klass_
6713 GenerateReferenceLoadTwoRegisters(instruction,
6714 temp_loc,
6715 obj_loc,
6716 class_offset,
6717 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006718
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006719 // /* HeapReference<Class> */ temp = temp->iftable_
6720 GenerateReferenceLoadTwoRegisters(instruction,
6721 temp_loc,
6722 temp_loc,
6723 iftable_offset,
6724 kWithoutReadBarrier);
6725 // Iftable is never null.
6726 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
6727 // Maybe poison the `cls` for direct comparison with memory.
6728 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
6729 // Loop through the iftable and check if any class matches.
6730 NearLabel start_loop;
6731 __ Bind(&start_loop);
6732 // Need to subtract first to handle the empty array case.
6733 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
6734 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6735 // Go to next interface if the classes do not match.
6736 __ cmpl(cls.AsRegister<CpuRegister>(),
6737 CodeGeneratorX86_64::ArrayAddress(temp,
6738 maybe_temp2_loc,
6739 TIMES_4,
6740 object_array_data_offset));
6741 __ j(kNotEqual, &start_loop); // Return if same class.
6742 // If `cls` was poisoned above, unpoison it.
6743 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006744 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006745 }
6746
6747 case TypeCheckKind::kBitstringCheck: {
6748 // /* HeapReference<Class> */ temp = obj->klass_
6749 GenerateReferenceLoadTwoRegisters(instruction,
6750 temp_loc,
6751 obj_loc,
6752 class_offset,
6753 kWithoutReadBarrier);
6754
6755 GenerateBitstringTypeCheckCompare(instruction, temp);
6756 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
6757 break;
6758 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006759 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006760
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006761 if (done.IsLinked()) {
6762 __ Bind(&done);
6763 }
6764
Roland Levillain0d5a2812015-11-13 10:07:31 +00006765 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006766}
6767
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006768void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006769 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6770 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006771 InvokeRuntimeCallingConvention calling_convention;
6772 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6773}
6774
6775void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006776 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006777 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006778 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006779 if (instruction->IsEnter()) {
6780 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6781 } else {
6782 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6783 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006784}
6785
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05306786void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6787 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6788 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6789 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6790 locations->SetInAt(0, Location::RequiresRegister());
6791 // There is no immediate variant of negated bitwise and in X86.
6792 locations->SetInAt(1, Location::RequiresRegister());
6793 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6794}
6795
6796void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6797 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6798 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6799 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6800 locations->SetInAt(0, Location::RequiresRegister());
6801 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6802}
6803
6804void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6805 LocationSummary* locations = instruction->GetLocations();
6806 Location first = locations->InAt(0);
6807 Location second = locations->InAt(1);
6808 Location dest = locations->Out();
6809 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
6810}
6811
6812void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6813 LocationSummary* locations = instruction->GetLocations();
6814 Location src = locations->InAt(0);
6815 Location dest = locations->Out();
6816 switch (instruction->GetOpKind()) {
6817 case HInstruction::kAnd:
6818 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6819 break;
6820 case HInstruction::kXor:
6821 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6822 break;
6823 default:
6824 LOG(FATAL) << "Unreachable";
6825 }
6826}
6827
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006828void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6829void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6830void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6831
6832void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6833 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006834 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006835 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6836 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006837 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006838 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006839 locations->SetOut(Location::SameAsFirstInput());
6840}
6841
6842void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
6843 HandleBitwiseOperation(instruction);
6844}
6845
6846void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
6847 HandleBitwiseOperation(instruction);
6848}
6849
6850void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
6851 HandleBitwiseOperation(instruction);
6852}
6853
// Emits code for HAnd/HOr/HXor. The register allocator has placed the
// output in the same register as the first input (SameAsFirstInput in
// the matching LocationsBuilder method), so each case reduces to a
// single two-operand x86 instruction.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // 32-bit case: the second operand can be a register, a constant
    // (encoded as an immediate), or a stack slot (memory operand).
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Stack-slot operand: operate directly on memory relative to RSP.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    // 64-bit case.
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // x86-64 only encodes 32-bit (sign-extended) immediates in these
    // instructions; wider constants are loaded from a RIP-relative
    // in-memory literal instead.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6942
// Loads /* HeapReference<Object> */ out = *(out + offset): the base
// register is also the destination and is clobbered by the load. When a
// non-Baker read barrier is emitted, `maybe_temp` must hold a register
// so the original base can be preserved for the barrier's slow path.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6975
// Loads /* HeapReference<Object> */ out = *(obj + offset) into a register
// distinct from the base (`obj` is left intact), emitting a read barrier
// on the loaded reference when requested.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
7004
// Loads a GC root located at `address` into `root`, emitting the root
// read barrier when requested. If `fixup_label` is non-null it is bound
// immediately after the load instruction, so its pc identifies the load
// itself (presumably consumed by linker patching — see callers).
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The movl above relies on GC roots having the same in-memory
      // layout as a 32-bit compressed reference; assert that here.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking= */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      const int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
7071
7072void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7073 Location ref,
7074 CpuRegister obj,
7075 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007076 bool needs_null_check) {
7077 DCHECK(kEmitCompilerReadBarrier);
7078 DCHECK(kUseBakerReadBarrier);
7079
7080 // /* HeapReference<Object> */ ref = *(obj + offset)
7081 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007082 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007083}
7084
7085void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7086 Location ref,
7087 CpuRegister obj,
7088 uint32_t data_offset,
7089 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007090 bool needs_null_check) {
7091 DCHECK(kEmitCompilerReadBarrier);
7092 DCHECK(kUseBakerReadBarrier);
7093
Roland Levillain3d312422016-06-23 13:53:42 +01007094 static_assert(
7095 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7096 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007097 // /* HeapReference<Object> */ ref =
7098 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007099 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007100 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007101}
7102
// Common implementation of the fast-path Baker read barrier: tests the
// lock word's gray bit, loads the reference, and routes gray objects to
// a marking slow path. Instruction order here is load-bearing — the
// gray-bit test sets the CPU flags, which must survive untouched across
// the reference load until the conditional branch below.
void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    CpuRegister obj,
                                                                    const Address& src,
                                                                    bool needs_null_check,
                                                                    bool always_update_field,
                                                                    CpuRegister* temp1,
                                                                    CpuRegister* temp2) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as:
  // - it implements the load-load fence using a data dependency on
  //   the high-bits of rb_state, which are expected to be all zeroes
  //   (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
  //   here, which is a no-op thanks to the x86-64 memory model);
  // - it performs additional checks that we do not do here for
  //   performance reasons.

  CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
  constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
  constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // At this point, just do the "if" and make sure that flags are preserved until the branch.
  __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
  if (needs_null_check) {
    // The testb above is the first access to `obj`, so it is the
    // instruction whose fault the implicit null check must map to.
    MaybeRecordImplicitNullCheck(instruction);
  }

  // Load fence to prevent load-load reordering.
  // Note that this is a no-op, thanks to the x86-64 memory model.
  GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

  // The actual reference load.
  // /* HeapReference<Object> */ ref = *src
  __ movl(ref_reg, src);  // Flags are unaffected.

  // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
  // Slow path marking the object `ref` when it is gray.
  SlowPathCode* slow_path;
  if (always_update_field) {
    // CAS-style callers need the field updated in place; this variant
    // requires two extra temporaries.
    DCHECK(temp1 != nullptr);
    DCHECK(temp2 != nullptr);
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
        instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
        instruction, ref, /* unpoison_ref_before_marking= */ true);
  }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}
7186
7187void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7188 Location out,
7189 Location ref,
7190 Location obj,
7191 uint32_t offset,
7192 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007193 DCHECK(kEmitCompilerReadBarrier);
7194
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007195 // Insert a slow path based read barrier *after* the reference load.
7196 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007197 // If heap poisoning is enabled, the unpoisoning of the loaded
7198 // reference will be carried out by the runtime within the slow
7199 // path.
7200 //
7201 // Note that `ref` currently does not get unpoisoned (when heap
7202 // poisoning is enabled), which is alright as the `ref` argument is
7203 // not used by the artReadBarrierSlow entry point.
7204 //
7205 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007206 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007207 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7208 AddSlowPath(slow_path);
7209
Roland Levillain0d5a2812015-11-13 10:07:31 +00007210 __ jmp(slow_path->GetEntryLabel());
7211 __ Bind(slow_path->GetExitLabel());
7212}
7213
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007214void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7215 Location out,
7216 Location ref,
7217 Location obj,
7218 uint32_t offset,
7219 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007220 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007221 // Baker's read barriers shall be handled by the fast path
7222 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7223 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007224 // If heap poisoning is enabled, unpoisoning will be taken care of
7225 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007226 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007227 } else if (kPoisonHeapReferences) {
7228 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7229 }
7230}
7231
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007232void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7233 Location out,
7234 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007235 DCHECK(kEmitCompilerReadBarrier);
7236
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007237 // Insert a slow path based read barrier *after* the GC root load.
7238 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007239 // Note that GC roots are not affected by heap poisoning, so we do
7240 // not need to do anything special for this here.
7241 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007242 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007243 AddSlowPath(slow_path);
7244
Roland Levillain0d5a2812015-11-13 10:07:31 +00007245 __ jmp(slow_path->GetEntryLabel());
7246 __ Bind(slow_path->GetExitLabel());
7247}
7248
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007249void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007250 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007251 LOG(FATAL) << "Unreachable";
7252}
7253
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007254void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007255 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007256 LOG(FATAL) << "Unreachable";
7257}
7258
Mark Mendellfe57faa2015-09-18 09:26:15 -04007259// Simple implementation of packed switch - generate cascaded compare/jumps.
7260void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7261 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007262 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007263 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007264 locations->AddTemp(Location::RequiresRegister());
7265 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007266}
7267
// Code generation for HPackedSwitch. Small switches are emitted as a cascade
// of compare/jump pairs; larger ones go through a RIP-relative jump table
// stored in the constant area.
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Non-zero bias: one compare both filters out values below the range
      // (jump to default) and handles the first case (jump on equal).
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero lower bound an
      // unsigned below-comparison covers the "less than first case" check.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps.
    // Each compare against case_value[index + 1] dispatches two cases:
    // strictly-less means case_value[index], equal means case_value[index + 1].
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table path below.
  // Remove the bias, if needed.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? A single unsigned compare also rejects negative
  // (biased) values.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
7348
xueliang.zhonge0eb4832017-10-30 13:43:14 +00007349void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7350 ATTRIBUTE_UNUSED) {
7351 LOG(FATAL) << "Unreachable";
7352}
7353
void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  // HIntermediateAddress is never expected to reach the x86-64 code generator.
  LOG(FATAL) << "Unreachable";
}
7358
Aart Bikc5d47542016-01-27 17:00:35 -08007359void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7360 if (value == 0) {
7361 __ xorl(dest, dest);
7362 } else {
7363 __ movl(dest, Immediate(value));
7364 }
7365}
7366
Mark Mendell92e83bf2015-05-07 11:25:03 -04007367void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7368 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007369 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007370 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007371 } else if (IsUint<32>(value)) {
7372 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007373 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7374 } else {
7375 __ movq(dest, Immediate(value));
7376 }
7377}
7378
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007379void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7380 if (value == 0) {
7381 __ xorps(dest, dest);
7382 } else {
7383 __ movss(dest, LiteralInt32Address(value));
7384 }
7385}
7386
7387void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7388 if (value == 0) {
7389 __ xorpd(dest, dest);
7390 } else {
7391 __ movsd(dest, LiteralInt64Address(value));
7392 }
7393}
7394
7395void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7396 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7397}
7398
7399void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7400 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7401}
7402
Aart Bika19616e2016-02-01 18:57:58 -08007403void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7404 if (value == 0) {
7405 __ testl(dest, dest);
7406 } else {
7407 __ cmpl(dest, Immediate(value));
7408 }
7409}
7410
7411void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7412 if (IsInt<32>(value)) {
7413 if (value == 0) {
7414 __ testq(dest, dest);
7415 } else {
7416 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7417 }
7418 } else {
7419 // Value won't fit in an int.
7420 __ cmpq(dest, LiteralInt64Address(value));
7421 }
7422}
7423
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007424void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7425 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007426 GenerateIntCompare(lhs_reg, rhs);
7427}
7428
7429void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007430 if (rhs.IsConstant()) {
7431 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007432 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007433 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007434 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007435 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007436 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007437 }
7438}
7439
7440void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7441 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7442 if (rhs.IsConstant()) {
7443 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7444 Compare64BitValue(lhs_reg, value);
7445 } else if (rhs.IsDoubleStackSlot()) {
7446 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7447 } else {
7448 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7449 }
7450}
7451
7452Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7453 Location index,
7454 ScaleFactor scale,
7455 uint32_t data_offset) {
7456 return index.IsConstant() ?
7457 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7458 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7459}
7460
Mark Mendellcfa410b2015-05-25 16:02:44 -04007461void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7462 DCHECK(dest.IsDoubleStackSlot());
7463 if (IsInt<32>(value)) {
7464 // Can move directly as an int32 constant.
7465 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7466 Immediate(static_cast<int32_t>(value)));
7467 } else {
7468 Load64BitValue(CpuRegister(TMP), value);
7469 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7470 }
7471}
7472
Mark Mendell9c86b482015-09-18 13:36:07 -04007473/**
7474 * Class to handle late fixup of offsets into constant area.
7475 */
7476class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
7477 public:
7478 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
7479 : codegen_(&codegen), offset_into_constant_area_(offset) {}
7480
7481 protected:
7482 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7483
7484 CodeGeneratorX86_64* codegen_;
7485
7486 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01007487 void Process(const MemoryRegion& region, int pos) override {
Mark Mendell9c86b482015-09-18 13:36:07 -04007488 // Patch the correct offset for the instruction. We use the address of the
7489 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
7490 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
7491 int32_t relative_position = constant_offset - pos;
7492
7493 // Patch in the right value.
7494 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7495 }
7496
7497 // Location in constant area that the fixup refers to.
7498 size_t offset_into_constant_area_;
7499};
7500
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      // The real offset is unknown until CreateJumpTable(); -1 is a placeholder.
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table for `switch_instr_` to the constant area and
  // records its offset so the RIP-relative reference can be patched.
  // Requires all successor labels to be bound already.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};
7537
Mark Mendellf55c3e02015-03-26 21:07:46 -04007538void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7539 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007540 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007541 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7542 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007543 assembler->Align(4, 0);
7544 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007545
7546 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007547 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell9c86b482015-09-18 13:36:07 -04007548 jump_table->CreateJumpTable();
7549 }
7550
7551 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007552 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007553 }
7554
7555 // And finish up.
7556 CodeGenerator::Finalize(allocator);
7557}
7558
Mark Mendellf55c3e02015-03-26 21:07:46 -04007559Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007560 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007561 return Address::RIP(fixup);
7562}
7563
7564Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007565 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007566 return Address::RIP(fixup);
7567}
7568
7569Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007570 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007571 return Address::RIP(fixup);
7572}
7573
7574Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007575 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007576 return Address::RIP(fixup);
7577}
7578
Andreas Gampe85b62f22015-09-09 13:15:38 -07007579// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007580void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007581 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007582 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007583 return;
7584 }
7585
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007586 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007587
7588 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7589 if (trg.Equals(return_loc)) {
7590 return;
7591 }
7592
7593 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007594 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007595 parallel_move.AddMove(return_loc, trg, type, nullptr);
7596 GetMoveResolver()->EmitNativeCode(&parallel_move);
7597}
7598
Mark Mendell9c86b482015-09-18 13:36:07 -04007599Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7600 // Create a fixup to be used to create and address the jump table.
7601 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007602 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007603
7604 // We have to populate the jump tables.
7605 fixups_to_jump_tables_.push_back(table_fixup);
7606 return Address::RIP(table_fixup);
7607}
7608
Mark Mendellea5af682015-10-22 17:35:49 -04007609void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7610 const Address& addr_high,
7611 int64_t v,
7612 HInstruction* instruction) {
7613 if (IsInt<32>(v)) {
7614 int32_t v_32 = v;
7615 __ movq(addr_low, Immediate(v_32));
7616 MaybeRecordImplicitNullCheck(instruction);
7617 } else {
7618 // Didn't fit in a register. Do it in pieces.
7619 int32_t low_v = Low32Bits(v);
7620 int32_t high_v = High32Bits(v);
7621 __ movl(addr_low, Immediate(low_v));
7622 MaybeRecordImplicitNullCheck(instruction);
7623 __ movl(addr_high, Immediate(high_v));
7624 }
7625}
7626
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007627void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
7628 const uint8_t* roots_data,
7629 const PatchInfo<Label>& info,
7630 uint64_t index_in_table) const {
7631 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7632 uintptr_t address =
7633 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
Andreas Gampec55bb392018-09-21 00:02:02 +00007634 using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007635 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7636 dchecked_integral_cast<uint32_t>(address);
7637}
7638
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007639void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7640 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007641 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007642 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007643 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007644 }
7645
7646 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007647 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007648 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007649 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007650 }
7651}
7652
Roland Levillain4d027112015-07-01 15:41:14 +01007653#undef __
7654
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007655} // namespace x86_64
7656} // namespace art