blob: 100a86b6a273b2f5f7502bbefe540247173b2a97 [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000019#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010025#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070026#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080027#include "intrinsics.h"
28#include "intrinsics_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000029#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070031#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070032#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070033#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010034#include "mirror/object_reference.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000035#include "scoped_thread_state_change-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "thread.h"
37#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010038#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039#include "utils/x86_64/assembler_x86_64.h"
40#include "utils/x86_64/managed_register_x86_64.h"
41
Vladimir Marko0a516052019-10-14 13:00:44 +000042namespace art {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010043
Roland Levillain0d5a2812015-11-13 10:07:31 +000044template<class MirrorType>
45class GcRoot;
46
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010047namespace x86_64 {
48
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010049static constexpr int kCurrentMethodStackOffset = 0;
Nicolas Geoffray76b1e172015-05-27 17:18:33 +010050static constexpr Register kMethodRegisterArgument = RDI;
Vladimir Markof3e0ee22015-12-17 15:23:13 +000051// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
52// table version generates 7 instructions and num_entries literals. Compare/jump sequence will
53// generates less code/data with a small num_entries.
54static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010055
Nicolas Geoffray4597b5b2015-01-23 21:51:55 +000056static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +000057static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +010058
Mark Mendell24f2dfa2015-01-14 19:51:45 -050059static constexpr int kC2ConditionMask = 0x400;
60
Vladimir Marko3232dbb2018-07-25 15:42:46 +010061static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
62 // Custom calling convention: RAX serves as both input and output.
63 RegisterSet caller_saves = RegisterSet::Empty();
64 caller_saves.Add(Location::RegisterLocation(RAX));
65 return caller_saves;
66}
67
Roland Levillain7cbd27f2016-08-11 23:53:33 +010068// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
69#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070070#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010071
Andreas Gampe85b62f22015-09-09 13:15:38 -070072class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010073 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000074 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010075
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010076 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +000077 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010078 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000079 if (instruction_->CanThrowIntoCatchBlock()) {
80 // Live registers will be restored in the catch block if caught.
81 SaveLiveRegisters(codegen, instruction_->GetLocations());
82 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010083 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000084 instruction_,
85 instruction_->GetDexPc(),
86 this);
Roland Levillain888d0672015-11-23 18:53:50 +000087 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010088 }
89
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010090 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010091
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010092 const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010093
Nicolas Geoffraye5038322014-07-04 09:41:32 +010094 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010095 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
96};
97
Andreas Gampe85b62f22015-09-09 13:15:38 -070098class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000099 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000100 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000101
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100102 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000103 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000104 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100105 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000106 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000107 }
108
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100109 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100110
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100111 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100112
Calin Juravled0d48522014-11-04 16:40:20 +0000113 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000114 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
115};
116
Andreas Gampe85b62f22015-09-09 13:15:38 -0700117class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +0000118 public:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100119 DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
David Srbecky9cd6d372016-02-09 15:24:47 +0000120 : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000121
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100122 void EmitNativeCode(CodeGenerator* codegen) override {
Calin Juravled0d48522014-11-04 16:40:20 +0000123 __ Bind(GetEntryLabel());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100124 if (type_ == DataType::Type::kInt32) {
Calin Juravlebacfec32014-11-14 15:54:36 +0000125 if (is_div_) {
126 __ negl(cpu_reg_);
127 } else {
Mark Mendellcfa410b2015-05-25 16:02:44 -0400128 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000129 }
130
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000131 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100132 DCHECK_EQ(DataType::Type::kInt64, type_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000133 if (is_div_) {
134 __ negq(cpu_reg_);
135 } else {
Mark Mendell92e83bf2015-05-07 11:25:03 -0400136 __ xorl(cpu_reg_, cpu_reg_);
Calin Juravlebacfec32014-11-14 15:54:36 +0000137 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +0000138 }
Calin Juravled0d48522014-11-04 16:40:20 +0000139 __ jmp(GetExitLabel());
140 }
141
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100142 const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100143
Calin Juravled0d48522014-11-04 16:40:20 +0000144 private:
Calin Juravlebacfec32014-11-14 15:54:36 +0000145 const CpuRegister cpu_reg_;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100146 const DataType::Type type_;
Calin Juravlebacfec32014-11-14 15:54:36 +0000147 const bool is_div_;
148 DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
Calin Juravled0d48522014-11-04 16:40:20 +0000149};
150
Andreas Gampe85b62f22015-09-09 13:15:38 -0700151class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100153 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000154 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000155
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100156 void EmitNativeCode(CodeGenerator* codegen) override {
Aart Bikb13c65b2017-03-21 20:14:07 -0700157 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000158 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000159 __ Bind(GetEntryLabel());
Aart Bik24b905f2017-04-06 09:59:06 -0700160 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
Serban Constantinescuba45db02016-07-12 22:53:02 +0100161 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000162 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bik24b905f2017-04-06 09:59:06 -0700163 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100164 if (successor_ == nullptr) {
165 __ jmp(GetReturnLabel());
166 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000167 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100168 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000169 }
170
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100171 Label* GetReturnLabel() {
172 DCHECK(successor_ == nullptr);
173 return &return_label_;
174 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000175
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100176 HBasicBlock* GetSuccessor() const {
177 return successor_;
178 }
179
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100180 const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100181
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000182 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100183 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000184 Label return_label_;
185
186 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
187};
188
Andreas Gampe85b62f22015-09-09 13:15:38 -0700189class BoundsCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100190 public:
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100191 explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000192 : SlowPathCode(instruction) {}
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100193
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100194 void EmitNativeCode(CodeGenerator* codegen) override {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100195 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000196 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100197 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000198 if (instruction_->CanThrowIntoCatchBlock()) {
199 // Live registers will be restored in the catch block if caught.
200 SaveLiveRegisters(codegen, instruction_->GetLocations());
201 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400202 // Are we using an array length from memory?
203 HInstruction* array_length = instruction_->InputAt(1);
204 Location length_loc = locations->InAt(1);
205 InvokeRuntimeCallingConvention calling_convention;
206 if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
207 // Load the array length into our temporary.
Nicolas Geoffray0aff3a82017-10-13 13:12:36 +0100208 HArrayLength* length = array_length->AsArrayLength();
Nicolas Geoffray003444a2017-10-17 10:58:42 +0100209 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
Mark Mendellee8d9712016-07-12 11:13:15 -0400210 Location array_loc = array_length->GetLocations()->InAt(0);
211 Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
212 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
213 // Check for conflicts with index.
214 if (length_loc.Equals(locations->InAt(0))) {
215 // We know we aren't using parameter 2.
216 length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
217 }
218 __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
Nicolas Geoffray0aff3a82017-10-13 13:12:36 +0100219 if (mirror::kUseStringCompression && length->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +0100220 __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -0700221 }
Mark Mendellee8d9712016-07-12 11:13:15 -0400222 }
223
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000224 // We're moving two locations to locations that could overlap, so we need a parallel
225 // move resolver.
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000226 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100227 locations->InAt(0),
Nicolas Geoffrayf0e39372014-11-12 17:50:07 +0000228 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100229 DataType::Type::kInt32,
Mark Mendellee8d9712016-07-12 11:13:15 -0400230 length_loc,
Nicolas Geoffray90218252015-04-15 11:56:51 +0100231 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100232 DataType::Type::kInt32);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100233 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
234 ? kQuickThrowStringBounds
235 : kQuickThrowArrayBounds;
236 x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100237 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Roland Levillain888d0672015-11-23 18:53:50 +0000238 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100239 }
240
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100241 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100242
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100243 const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100244
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100245 private:
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +0100246 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
247};
248
Andreas Gampe85b62f22015-09-09 13:15:38 -0700249class LoadClassSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100250 public:
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100251 LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
252 : SlowPathCode(at), cls_(cls) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000253 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100254 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000255 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100256
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100257 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000258 LocationSummary* locations = instruction_->GetLocations();
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100259 Location out = locations->Out();
260 const uint32_t dex_pc = instruction_->GetDexPc();
261 bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
262 bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();
263
Roland Levillain0d5a2812015-11-13 10:07:31 +0000264 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100265 __ Bind(GetEntryLabel());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000266 SaveLiveRegisters(codegen, locations);
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000267
Vladimir Markoea4c1262017-02-06 19:59:33 +0000268 // Custom calling convention: RAX serves as both input and output.
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100269 if (must_resolve_type) {
270 DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
271 dex::TypeIndex type_index = cls_->GetTypeIndex();
272 __ movl(CpuRegister(RAX), Immediate(type_index.index_));
Vladimir Marko9d479252018-07-24 11:35:20 +0100273 x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
274 CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
Vladimir Markoa9f303c2018-07-20 16:43:56 +0100275 // If we also must_do_clinit, the resolved type is now in the correct register.
276 } else {
277 DCHECK(must_do_clinit);
278 Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
279 x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
280 }
281 if (must_do_clinit) {
282 x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
283 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
Roland Levillain888d0672015-11-23 18:53:50 +0000284 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100285
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000286 // Move the class to the desired location.
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000287 if (out.IsValid()) {
288 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Roland Levillain0d5a2812015-11-13 10:07:31 +0000289 x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000290 }
291
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +0000292 RestoreLiveRegisters(codegen, locations);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100293 __ jmp(GetExitLabel());
294 }
295
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100296 const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100297
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100298 private:
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000299 // The class this slow path will load.
300 HLoadClass* const cls_;
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100301
Nicolas Geoffray424f6762014-11-03 14:51:25 +0000302 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +0100303};
304
Vladimir Markoaad75c62016-10-03 08:46:48 +0000305class LoadStringSlowPathX86_64 : public SlowPathCode {
306 public:
307 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
308
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100309 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Markoaad75c62016-10-03 08:46:48 +0000310 LocationSummary* locations = instruction_->GetLocations();
311 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
312
313 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
314 __ Bind(GetEntryLabel());
315 SaveLiveRegisters(codegen, locations);
316
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000317 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100318 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000319 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000320 x86_64_codegen->InvokeRuntime(kQuickResolveString,
321 instruction_,
322 instruction_->GetDexPc(),
323 this);
324 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
325 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
326 RestoreLiveRegisters(codegen, locations);
327
Vladimir Markoaad75c62016-10-03 08:46:48 +0000328 __ jmp(GetExitLabel());
329 }
330
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100331 const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }
Vladimir Markoaad75c62016-10-03 08:46:48 +0000332
333 private:
334 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
335};
336
Andreas Gampe85b62f22015-09-09 13:15:38 -0700337class TypeCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000338 public:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000339 TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
David Srbecky9cd6d372016-02-09 15:24:47 +0000340 : SlowPathCode(instruction), is_fatal_(is_fatal) {}
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000341
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100342 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000343 LocationSummary* locations = instruction_->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100344 uint32_t dex_pc = instruction_->GetDexPc();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000345 DCHECK(instruction_->IsCheckCast()
346 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000347
Roland Levillain0d5a2812015-11-13 10:07:31 +0000348 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000349 __ Bind(GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000350
Vladimir Markoe619f6c2017-12-12 16:00:01 +0000351 if (kPoisonHeapReferences &&
352 instruction_->IsCheckCast() &&
353 instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
354 // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
355 __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
356 }
357
Vladimir Marko87584542017-12-12 17:47:52 +0000358 if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000359 SaveLiveRegisters(codegen, locations);
360 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000361
362 // We're moving two locations to locations that could overlap, so we need a parallel
363 // move resolver.
364 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800365 codegen->EmitParallelMoves(locations->InAt(0),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800366 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100367 DataType::Type::kReference,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800368 locations->InAt(1),
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800369 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100370 DataType::Type::kReference);
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000371 if (instruction_->IsInstanceOf()) {
Serban Constantinescuba45db02016-07-12 22:53:02 +0100372 x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800373 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000374 } else {
375 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800376 x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
377 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000378 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +0000379
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000380 if (!is_fatal_) {
381 if (instruction_->IsInstanceOf()) {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000382 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000383 }
Nicolas Geoffray75374372015-09-17 17:12:19 +0000384
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000385 RestoreLiveRegisters(codegen, locations);
386 __ jmp(GetExitLabel());
387 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000388 }
389
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100390 const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100391
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100392 bool IsFatal() const override { return is_fatal_; }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000393
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000394 private:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +0000395 const bool is_fatal_;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +0000396
397 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
398};
399
Andreas Gampe85b62f22015-09-09 13:15:38 -0700400class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700401 public:
Aart Bik42249c32016-01-07 15:33:50 -0800402 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000403 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700404
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100405 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000406 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700407 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100408 LocationSummary* locations = instruction_->GetLocations();
409 SaveLiveRegisters(codegen, locations);
410 InvokeRuntimeCallingConvention calling_convention;
411 x86_64_codegen->Load32BitValue(
412 CpuRegister(calling_convention.GetRegisterAt(0)),
413 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100414 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100415 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700416 }
417
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100418 const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100419
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700420 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700421 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
422};
423
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100424class ArraySetSlowPathX86_64 : public SlowPathCode {
425 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000426 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100427
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100428 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100429 LocationSummary* locations = instruction_->GetLocations();
430 __ Bind(GetEntryLabel());
431 SaveLiveRegisters(codegen, locations);
432
433 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100434 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100435 parallel_move.AddMove(
436 locations->InAt(0),
437 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100438 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100439 nullptr);
440 parallel_move.AddMove(
441 locations->InAt(1),
442 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100443 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100444 nullptr);
445 parallel_move.AddMove(
446 locations->InAt(2),
447 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100448 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100449 nullptr);
450 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
451
Roland Levillain0d5a2812015-11-13 10:07:31 +0000452 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100453 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000454 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100455 RestoreLiveRegisters(codegen, locations);
456 __ jmp(GetExitLabel());
457 }
458
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100459 const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100460
461 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100462 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
463};
464
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100465// Slow path marking an object reference `ref` during a read
466// barrier. The field `obj.field` in the object `obj` holding this
467// reference does not get updated by this slow path after marking (see
468// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
469//
470// This means that after the execution of this slow path, `ref` will
471// always be up-to-date, but `obj.field` may not; i.e., after the
472// flip, `ref` will be a to-space reference, but `obj.field` will
473// probably still be a from-space reference (unless it gets updated by
474// another thread, or if another thread installed another object
475// reference (different from `ref`) in `obj.field`).
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000476class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
477 public:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100478 ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
479 Location ref,
480 bool unpoison_ref_before_marking)
481 : SlowPathCode(instruction),
482 ref_(ref),
483 unpoison_ref_before_marking_(unpoison_ref_before_marking) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000484 DCHECK(kEmitCompilerReadBarrier);
485 }
486
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100487 const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000488
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100489 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000490 LocationSummary* locations = instruction_->GetLocations();
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100491 CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
492 Register ref_reg = ref_cpu_reg.AsRegister();
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000493 DCHECK(locations->CanCall());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100494 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000495 DCHECK(instruction_->IsInstanceFieldGet() ||
496 instruction_->IsStaticFieldGet() ||
497 instruction_->IsArrayGet() ||
Roland Levillain16d9f942016-08-25 17:27:56 +0100498 instruction_->IsArraySet() ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000499 instruction_->IsLoadClass() ||
500 instruction_->IsLoadString() ||
501 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100502 instruction_->IsCheckCast() ||
Roland Levillain0b671c02016-08-19 12:02:34 +0100503 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
504 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000505 << "Unexpected instruction in read barrier marking slow path: "
506 << instruction_->DebugName();
507
508 __ Bind(GetEntryLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100509 if (unpoison_ref_before_marking_) {
Vladimir Marko953437b2016-08-24 08:30:46 +0000510 // Object* ref = ref_addr->AsMirrorPtr()
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100511 __ MaybeUnpoisonHeapReference(ref_cpu_reg);
Vladimir Marko953437b2016-08-24 08:30:46 +0000512 }
Roland Levillain4359e612016-07-20 11:32:19 +0100513 // No need to save live registers; it's taken care of by the
514 // entrypoint. Also, there is no need to update the stack mask,
515 // as this runtime call will not trigger a garbage collection.
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000516 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100517 DCHECK_NE(ref_reg, RSP);
518 DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
Roland Levillain02b75802016-07-13 11:54:35 +0100519 // "Compact" slow path, saving two moves.
520 //
521 // Instead of using the standard runtime calling convention (input
522 // and output in R0):
523 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100524 // RDI <- ref
Roland Levillain02b75802016-07-13 11:54:35 +0100525 // RAX <- ReadBarrierMark(RDI)
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100526 // ref <- RAX
Roland Levillain02b75802016-07-13 11:54:35 +0100527 //
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100528 // we just use rX (the register containing `ref`) as input and output
Roland Levillain02b75802016-07-13 11:54:35 +0100529 // of a dedicated entrypoint:
530 //
531 // rX <- ReadBarrierMarkRegX(rX)
532 //
533 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100534 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
Roland Levillaindec8f632016-07-22 17:10:06 +0100535 // This runtime call does not require a stack map.
536 x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000537 __ jmp(GetExitLabel());
538 }
539
540 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100541 // The location (register) of the marked object reference.
542 const Location ref_;
543 // Should the reference in `ref_` be unpoisoned prior to marking it?
544 const bool unpoison_ref_before_marking_;
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000545
546 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
547};
548
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `temp1` and `temp2` are scratch registers: `temp1` holds the old
  // (pre-marking) reference across the mark call, and `temp2` is used
  // to save/restore RAX around the CAS sequence below.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in R0):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates the field before us, but that is OK. This is
    // achieved using a strong compare-and-set (CAS) operation with
    // relaxed memory synchronization ordering, where the expected
    // value is the old reference and the desired value is the new
    // reference. This operation is implemented with a 32-bit LOCK
    // CMPXCHG instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it is overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch registers; see the constructor comment.
  const CpuRegister temp1_;
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
720
// Slow path generating a read barrier for a heap reference. Calls the
// kQuickReadBarrierSlow entrypoint with (ref, obj, offset) and moves
// the result (RAX) into `out_`.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    // Only the instructions below are expected to emit this read barrier.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the real offset (scaled index plus data offset) and store it in `index`.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: the third argument is the constant field offset.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that can safely be clobbered after live registers have been saved.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // Destination of the read barrier result.
  const Location out_;
  // The reference being instrumented.
  const Location ref_;
  // The object holding the reference.
  const Location obj_;
  // Field offset (or array data offset for HArrayGet).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
902
903// Slow path generating a read barrier for a GC root.
904class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
905 public:
906 ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +0000907 : SlowPathCode(instruction), out_(out), root_(root) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000908 DCHECK(kEmitCompilerReadBarrier);
909 }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000910
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100911 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000912 LocationSummary* locations = instruction_->GetLocations();
913 DCHECK(locations->CanCall());
914 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain1e7f8db2015-12-15 10:54:19 +0000915 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
916 << "Unexpected instruction in read barrier for GC root slow path: "
917 << instruction_->DebugName();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000918
919 __ Bind(GetEntryLabel());
920 SaveLiveRegisters(codegen, locations);
921
922 InvokeRuntimeCallingConvention calling_convention;
923 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
924 x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100925 x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000926 instruction_,
927 instruction_->GetDexPc(),
928 this);
929 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
930 x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));
931
932 RestoreLiveRegisters(codegen, locations);
933 __ jmp(GetExitLabel());
934 }
935
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100936 const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }
Roland Levillain0d5a2812015-11-13 10:07:31 +0000937
938 private:
Roland Levillain0d5a2812015-11-13 10:07:31 +0000939 const Location out_;
940 const Location root_;
941
942 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
943};
944
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100945#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100946// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
947#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100948
// Maps an HIR integer condition to the corresponding x86-64 condition code.
// Signed conditions map to the signed codes (less/greater family) and the
// kCondB/BE/A/AE conditions map to the unsigned codes (below/above family).
inline Condition X86_64IntegerCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
965
// Maps FP condition to x86_64 name. Note that floating-point comparisons
// map to the unsigned condition codes (below/above family), unlike
// X86_64IntegerCondition above.
inline Condition X86_64FPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    default: break;  // should not happen
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}
980
// On x86-64, every static/direct dispatch kind is supported, so the
// desired dispatch info is returned unchanged, regardless of the method.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  return desired_dispatch_info;
}
986
// Emits the call sequence for a static or direct invoke. `temp` is a
// scratch location used to hold the resolved ArtMethod* (for all method
// load kinds except kRecursive and kRuntimeCall); `slow_path` is passed
// through to the PC info recording.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Self-call: the method is already available in one of the invoke's inputs.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      // kDummy32BitOffset is a placeholder; the recorded patch fixes up the
      // RIP-relative displacement later (see RecordBootImageMethodPatch).
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry (patched placeholder offset).
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86-64 memory model.
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // JIT: the method's address is known at compile time; embed it directly.
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: jump straight back to this method's frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
1047
// Emits a virtual call: loads the receiver's class, looks up the ArtMethod* in the
// embedded vtable at the invoke's vtable index, and calls its quick entrypoint.
// `temp_in` must be a core register temp; `slow_path` (may be null) is associated
// with the stack map recorded for the call.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  // The null check piggybacks on the class load above, so it must be recorded
  // immediately after that instruction.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  // JIT-only: update or check the inline cache for this call site.
  MaybeGenerateInlineCacheCheck(invoke->GetDexPc(), temp);

  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1083
Vladimir Marko6fd16062018-06-26 11:02:04 +01001084void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001085 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
1086 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001087}
1088
Vladimir Markob066d432018-01-03 13:14:37 +00001089void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001090 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
1091 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00001092}
1093
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001094void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
1095 boot_image_method_patches_.emplace_back(
1096 invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001097 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001098}
1099
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001100void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
1101 method_bss_entry_patches_.emplace_back(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
1102 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001103}
1104
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001105void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1106 boot_image_type_patches_.emplace_back(
1107 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001108 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001109}
1110
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001111Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001112 type_bss_entry_patches_.emplace_back(
1113 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001114 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001115}
1116
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001117void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1118 boot_image_string_patches_.emplace_back(
1119 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1120 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001121}
1122
Vladimir Markoaad75c62016-10-03 08:46:48 +00001123Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001124 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001125 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001126 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001127}
1128
// Loads a boot image address into `reg`. The instruction sequence depends on the
// compilation mode:
//  - boot image compilation: PC-relative leal with an intrinsic patch (address
//    fixed up at link time);
//  - PIC AOT compilation: movl from a .data.bimg.rel.ro slot with a relro patch;
//  - JIT: the boot image is already mapped, so embed the absolute 32-bit address.
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    __ leal(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageIntrinsicPatch(boot_image_reference);
  } else if (GetCompilerOptions().GetCompilePic()) {
    __ movl(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageRelRoPatch(boot_image_reference);
  } else {
    // JIT compilation: the boot image spaces are present in the current process.
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
  }
}
1144
Vladimir Marko6fd16062018-06-26 11:02:04 +01001145void CodeGeneratorX86_64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1146 uint32_t boot_image_offset) {
1147 DCHECK(invoke->IsStatic());
1148 InvokeRuntimeCallingConvention calling_convention;
1149 CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
1150 if (GetCompilerOptions().IsBootImage()) {
1151 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1152 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1153 __ leal(argument,
Andreas Gampe3db70682018-12-26 15:12:03 -08001154 Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko6fd16062018-06-26 11:02:04 +01001155 MethodReference target_method = invoke->GetTargetMethod();
1156 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1157 boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
1158 __ Bind(&boot_image_type_patches_.back().label);
1159 } else {
1160 LoadBootImageAddress(argument, boot_image_offset);
1161 }
1162 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1163 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1164}
1165
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;

// Converts each recorded PatchInfo into a linker::LinkerPatch via `Factory` and
// appends it to `linker_patches`. The literal offset is the label position minus
// the 4-byte adjustment above; the label position itself is the anchor PC.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
  }
}
1180
// Adapts a dex-file-less LinkerPatch factory (intrinsic / rel-ro patches) to the
// 4-argument signature expected by EmitPcRelativeLinkerPatches, dropping the
// (always null) dex file argument.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1189
// Flushes all recorded patches into `linker_patches`. The total is precomputed
// so we can reserve once and verify with the final DCHECK that every recorded
// patch was emitted exactly once.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_other_patches_.size();
  linker_patches->reserve(size);
  // Direct boot image references are only valid when producing a (possibly
  // extended) boot image; otherwise those patch lists must be empty.
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // "Other" patches are intrinsic references when compiling the boot image,
  // .data.bimg.rel.ro entries otherwise; both carry no dex file.
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  // .bss entry patches are emitted in every compilation mode.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1228
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001229void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001230 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001231}
1232
1233void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001234 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001235}
1236
Vladimir Markoa0431112018-06-25 09:32:54 +01001237const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
1238 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
1239}
1240
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001241size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1242 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1243 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001244}
1245
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001246size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1247 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1248 return kX86_64WordSize;
1249}
1250
// Spills FP register `reg_id` for a slow path. With SIMD in the graph the full
// 128-bit XMM register is saved (movups), otherwise only the 64-bit scalar part
// (movsd). Returns the number of bytes written.
size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  } else {
    __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
  }
  return GetSlowPathFPWidth();
}
1259
// Reloads FP register `reg_id` for a slow path; mirrors SaveFloatingPointRegister
// (full XMM with SIMD, 64-bit scalar otherwise). Returns the number of bytes read.
size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  } else {
    __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
  }
  return GetSlowPathFPWidth();
}
1268
// Calls the given quick runtime entrypoint through the thread-local entrypoint
// table and records PC info (a stack map) if the entrypoint requires one.
// `slow_path` may be null for calls emitted on the fast path.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1279
// Calls a runtime entrypoint (given as a raw Thread offset) without recording a
// stack map; only valid for entrypoints that cannot trigger stack walks.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1286
// Emits the actual runtime call: an indirect call through the gs:-based
// thread-local entrypoint slot at `entry_point_offset`.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
}
1290
// x86-64 has no register pairs (all core registers are 64-bit wide).
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);

// Constructs the x86-64 code generator. The base-class constructor receives the
// register counts and the callee-save masks (with the fake return-address
// register added to the core mask); all patch containers are arena-allocated.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Mark the fake return-address register as allocated so the register
  // allocator accounts for it.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001326
// Constructs the instruction visitor that emits code for each HInstruction,
// sharing the assembler owned by `codegen`.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1332
David Brazdil58282f42016-01-14 12:45:10 +00001333void CodeGeneratorX86_64::SetupBlockedRegisters() const {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001334 // Stack register is always reserved.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001335 blocked_core_registers_[RSP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001336
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00001337 // Block the register used as TMP.
Nicolas Geoffray71175b72014-10-09 22:13:55 +01001338 blocked_core_registers_[TMP] = true;
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001339}
1340
// Maps a core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}

// Maps a floating-point register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1348
// Emits the method prologue: optional hotness counting, implicit stack overflow
// probe, callee-save pushes, frame allocation, XMM callee-save spills, current
// method storage, and the should_deoptimize flag. CFI directives are interleaved
// so the unwinder can describe every intermediate state.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  // Bump the method's hotness counter, saturating at MaxCounter().
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    NearLabel overflow;
    __ cmpw(Address(CpuRegister(kMethodRegisterArgument),
                    ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(ArtMethod::MaxCounter()));
    __ j(kEqual, &overflow);
    __ addw(Address(CpuRegister(kMethodRegisterArgument),
                    ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
    __ Bind(&overflow);
  }

  if (!skip_overflow_check) {
    // Implicit stack overflow check: touch the page below the reserved region;
    // a fault here is turned into a StackOverflowError by the runtime.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Push allocated core callee-save registers, highest index first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    Register reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      __ pushq(CpuRegister(reg));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(reg), 0);
    }
  }

  // Allocate the rest of the frame in one RSP adjustment.
  int adjust = GetFrameSize() - GetCoreSpillSize();
  __ subq(CpuRegister(RSP), Immediate(adjust));
  __ cfi().AdjustCFAOffset(adjust);
  uint32_t xmm_spill_location = GetFpuSpillStart();
  size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();

  // Spill allocated FP callee-save registers into the freshly allocated frame.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
      int offset = xmm_spill_location + (xmm_spill_slot_size * i);
      __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
      __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
            CpuRegister(kMethodRegisterArgument));
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
  }
}
1414
// Emits the method epilogue, mirroring GenerateFrameEntry in reverse: FP
// callee-save restores, frame deallocation, core callee-save pops, then ret.
// CFI state is remembered/restored so code after the epilogue (other blocks)
// keeps the pre-epilogue unwind description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves in the reverse order of the prologue's pushes.
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1445
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001446void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
1447 __ Bind(GetLabelOf(block));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001448}
1449
// Emits a move between two arbitrary locations (register, FP register, stack
// slot, double stack slot, constant). Dispatches on the destination kind first,
// then the source kind; stack-to-stack moves go through the reserved TMP
// register. No-op if source equals destination.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      // 32-bit slot: movl zero-extends into the 64-bit register.
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Materialize the constant's bit pattern (32-bit for float, 64-bit
      // otherwise) into the XMM register.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      // Stack-to-stack: bounce through TMP.
      DCHECK(source.IsStackSlot()) << source;
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      Store64BitValueToStack(destination, value);
    } else {
      // 64-bit stack-to-stack: bounce through TMP.
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1529
Calin Juravle175dc732015-08-25 15:42:32 +01001530void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
1531 DCHECK(location.IsRegister());
1532 Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
1533}
1534
// Location-to-location move; the destination type is not needed on x86-64
// because Move() derives the width from the location kinds.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1539
1540void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1541 if (location.IsRegister()) {
1542 locations->AddTemp(location);
1543 } else {
1544 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1545 }
1546}
1547
// Emits control flow for an unconditional branch (HGoto/HTryBoundary fallthrough).
// Handles the always-throwing predecessor case, back-edge suspend checks (with
// optional hotness counting), entry-block suspend checks, and elides the jump
// when the successor is the next block in layout order.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      // Reload the current ArtMethod* from the frame and bump its hotness
      // counter, saturating at MaxCounter().
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), 0));
      NearLabel overflow;
      __ cmpw(Address(CpuRegister(TMP), ArtMethod::HotnessCountOffset().Int32Value()),
              Immediate(ArtMethod::MaxCounter()));
      __ j(kEqual, &overflow);
      __ addw(Address(CpuRegister(TMP), ArtMethod::HotnessCountOffset().Int32Value()),
              Immediate(1));
      __ Bind(&overflow);
    }
    // Back edge: the suspend check handles the jump to the successor itself.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1580
// HGoto uses no registers and produces no value.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Code generation for HGoto is shared with HTryBoundary via HandleGoto.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1588
// HTryBoundary uses no registers and produces no value.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

// A try boundary only needs code for its normal-flow edge; exceptional edges
// are handled by the runtime. A jump into the exit block needs no code at all.
void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
1599
// HExit uses no registers and produces no value.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// No code is emitted for the exit block itself.
void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1606
// Emits the conditional jumps that follow a floating-point compare
// (callers perform the ucomiss/ucomisd). An unordered result (NaN operand)
// is routed first: to `true_label` or `false_label` depending on whether the
// condition is defined to be true or false on NaN. The ordered outcome then
// jumps to `true_label`; falling through means the condition is false.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1618
// Emits the compare that sets the condition codes for `condition`, without
// emitting any jumps or setcc. Integral/reference types use cmp/test via the
// shared helpers; float/double use ucomiss/ucomisd with register, constant
// (RIP-relative literal) or stack-slot right-hand sides.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against an in-memory literal pool constant.
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        // Compare against an in-memory literal pool constant.
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1672
1673template<class LabelType>
1674void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
1675 LabelType* true_target_in,
1676 LabelType* false_target_in) {
1677 // Generated branching requires both targets to be explicit. If either of the
1678 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
1679 LabelType fallthrough_target;
1680 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1681 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
1682
1683 // Generate the comparison to set the CC.
1684 GenerateCompareTest(condition);
1685
1686 // Now generate the correct jump(s).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001687 DataType::Type type = condition->InputAt(0)->GetType();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001688 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001689 case DataType::Type::kInt64: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001690 __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
1691 break;
1692 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001693 case DataType::Type::kFloat32: {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001694 GenerateFPJumps(condition, true_target, false_target);
1695 break;
1696 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001697 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04001698 GenerateFPJumps(condition, true_target, false_target);
1699 break;
1700 }
1701 default:
1702 LOG(FATAL) << "Unexpected condition type " << type;
1703 }
1704
David Brazdil0debae72015-11-12 18:37:00 +00001705 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001706 __ jmp(false_target);
1707 }
David Brazdil0debae72015-11-12 18:37:00 +00001708
1709 if (fallthrough_target.IsLinked()) {
1710 __ Bind(&fallthrough_target);
1711 }
Mark Mendellc4701932015-04-10 13:18:51 -04001712}
1713
David Brazdil0debae72015-11-12 18:37:00 +00001714static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1715 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1716 // are set only strictly before `branch`. We can't use the eflags on long
1717 // conditions if they are materialized due to the complex branching.
1718 return cond->IsCondition() &&
1719 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001720 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001721}
1722
// Emits the test-and-branch for `instruction` (HIf, HDeoptimize, HSelect),
// whose condition is input `condition_input_index`. Either target may be null,
// meaning that outcome falls through to the code emitted after this call.
// Handles: constant conditions (unconditional jump or nothing), reusable
// EFLAGS, materialized booleans (compare against 0), and non-materialized
// conditions (re-emit the compare; long/FP compares are delegated).
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The flags from materializing the condition are still live; reuse them.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1806
// HIf needs an input location only when its condition is a boolean value or a
// materialized condition; a folded (emitted-at-use-site) condition is
// re-generated by GenerateTestAndBranch and needs no input here.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1813
1814void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001815 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1816 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1817 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1818 nullptr : codegen_->GetLabelOf(true_successor);
1819 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1820 nullptr : codegen_->GetLabelOf(false_successor);
Andreas Gampe3db70682018-12-26 15:12:03 -08001821 GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001822}
1823
// HDeoptimize calls into the runtime on its slow path; reserve the first
// runtime-call argument register as a custom caller-save so it survives to
// the slow path. The condition input is needed only when materialized.
void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}
1835
// Branches to the deoptimization slow path when the condition holds;
// otherwise execution falls through (false_target == nullptr).
void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
  GenerateTestAndBranch<Label>(deoptimize,
                               /* condition_input_index= */ 0,
                               slow_path->GetEntryLabel(),
                               /* false_target= */ nullptr);
}
1843
// HShouldDeoptimizeFlag produces its value in a register, no runtime call.
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

// Loads the should-deoptimize flag from its reserved slot in the frame.
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1854
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001855static bool SelectCanUseCMOV(HSelect* select) {
1856 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001857 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001858 return false;
1859 }
1860
1861 // A FP condition doesn't generate the single CC that we need.
1862 HInstruction* condition = select->GetCondition();
1863 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001864 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001865 return false;
1866 }
1867
1868 // We can generate a CMOV for this Select.
1869 return true;
1870}
1871
// Register allocation for HSelect. Input 0 (the "false" value) doubles as the
// output (SameAsFirstInput). When a CMOV will be used and the "true" value is
// a constant, it must be materialized in a register because CMOV has no
// immediate form. The condition (input 2) needs a register only when it is a
// boolean value or a materialized condition.
void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  if (DataType::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::Any());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    if (SelectCanUseCMOV(select)) {
      if (select->InputAt(1)->IsConstant()) {
        locations->SetInAt(1, Location::RequiresRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
    } else {
      locations->SetInAt(1, Location::Any());
    }
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}
1894
// Emits code for HSelect. Preferred form: a compare/test followed by a CMOV
// that conditionally overwrites the output (pre-loaded with the "false"
// value) with the "true" value. Fallback form: a test-and-branch around a
// move of the "true" value.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition is emitted here; re-generate the compare.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      // CMOV can take its source from memory (stack slot).
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Branch-based fallback: skip the move of the "true" value when the
    // condition is false.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1951
// HNativeDebugInfo uses no registers and produces no value.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}

void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

// Emits a single nop (used as a debugger-visible anchor point).
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1963
// Register allocation shared by all HCondition visitors. Long and integral
// left operands need a register; FP left operands need an XMM register; the
// right operand can live anywhere. An output register is only needed when the
// condition is materialized (not emitted at its use site).
void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      break;
    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
  }
  if (!cond->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister());
  }
}
1987
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001988void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001989 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001990 return;
Dave Allison20dfc792014-06-16 20:44:29 -07001991 }
Mark Mendellc4701932015-04-10 13:18:51 -04001992
1993 LocationSummary* locations = cond->GetLocations();
1994 Location lhs = locations->InAt(0);
1995 Location rhs = locations->InAt(1);
1996 CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
Mark Mendell152408f2015-12-31 12:28:50 -05001997 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001998
1999 switch (cond->InputAt(0)->GetType()) {
2000 default:
2001 // Integer case.
2002
2003 // Clear output register: setcc only sets the low byte.
2004 __ xorl(reg, reg);
2005
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01002006 codegen_->GenerateIntCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01002007 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04002008 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002009 case DataType::Type::kInt64:
Mark Mendellc4701932015-04-10 13:18:51 -04002010 // Clear output register: setcc only sets the low byte.
2011 __ xorl(reg, reg);
2012
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01002013 codegen_->GenerateLongCompare(lhs, rhs);
Roland Levillain4fa13f62015-07-06 18:11:54 +01002014 __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04002015 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002016 case DataType::Type::kFloat32: {
Mark Mendellc4701932015-04-10 13:18:51 -04002017 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
2018 if (rhs.IsConstant()) {
2019 float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
2020 __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
2021 } else if (rhs.IsStackSlot()) {
2022 __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
2023 } else {
2024 __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
2025 }
2026 GenerateFPJumps(cond, &true_label, &false_label);
2027 break;
2028 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002029 case DataType::Type::kFloat64: {
Mark Mendellc4701932015-04-10 13:18:51 -04002030 XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
2031 if (rhs.IsConstant()) {
2032 double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
2033 __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
2034 } else if (rhs.IsDoubleStackSlot()) {
2035 __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
2036 } else {
2037 __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
2038 }
2039 GenerateFPJumps(cond, &true_label, &false_label);
2040 break;
2041 }
2042 }
2043
2044 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04002045 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04002046
Roland Levillain4fa13f62015-07-06 18:11:54 +01002047 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04002048 __ Bind(&false_label);
2049 __ xorl(reg, reg);
2050 __ jmp(&done_label);
2051
Roland Levillain4fa13f62015-07-06 18:11:54 +01002052 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04002053 __ Bind(&true_label);
2054 __ movl(reg, Immediate(1));
2055 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07002056}
2057
// All HCondition flavors — signed (==, !=, <, <=, >, >=) and unsigned
// (below, below-or-equal, above, above-or-equal) — share the same location
// setup and code generation; each visitor simply forwards to HandleCondition.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2137
// Register allocation for HCompare (three-way compare producing -1/0/1).
// Integral operands: left in a register, right anywhere, output register that
// may alias an input (kNoOutputOverlap). FP operands: left in an XMM register,
// right anywhere, output in a (possibly fresh) CPU register.
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2165
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  // Emits a three-way compare: out = 0 if equal, 1 if left > right,
  // -1 if left < right. For FP inputs the gt/lt bias of the HCompare decides
  // whether an unordered result (NaN operand) yields 1 or -1.
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Integral compares use signed kLess; FP compares switch this to kBelow
  // below, because ucomis{s,d} reports "less than" through the carry flag.
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      // The right operand may be a literal (loaded via the constant area),
      // a stack slot, or another XMM register.
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN operand): gt-bias produces 1, otherwise -1.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // Unordered (NaN operand): gt-bias produces 1, otherwise -1.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  // ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // Materialize the result: 0 (equal), 1 (greater) or -1 (less).
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2235
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002236void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002237 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002238 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002239 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002240}
2241
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at each use site.
}
2245
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002246void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
2247 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002248 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002249 locations->SetOut(Location::ConstantLocation(constant));
2250}
2251
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at each use site.
}
2255
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002256void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002257 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002258 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002259 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002260}
2261
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at each use site.
}
2265
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002266void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
2267 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002268 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002269 locations->SetOut(Location::ConstantLocation(constant));
2270}
2271
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at each use site.
}
2275
2276void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
2277 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002278 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002279 locations->SetOut(Location::ConstantLocation(constant));
2280}
2281
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant's value is generated at each use site.
}
2286
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // A constructor fence uses no registers and produces no value.
  constructor_fence->SetLocations(nullptr);
}
2290
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is lowered to a store-store barrier.
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2295
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A memory barrier uses no registers and produces no value.
  memory_barrier->SetLocations(nullptr);
}
2299
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind carried by the HIR node.
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2303
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  // A void return has no operand, so no locations are needed.
  ret->SetLocations(nullptr);
}
2307
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
}
2311
2312void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002313 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002314 new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002315 switch (ret->InputAt(0)->GetType()) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002316 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002317 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002318 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002319 case DataType::Type::kInt8:
2320 case DataType::Type::kUint16:
2321 case DataType::Type::kInt16:
2322 case DataType::Type::kInt32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002323 case DataType::Type::kInt64:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002324 locations->SetInAt(0, Location::RegisterLocation(RAX));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002325 break;
2326
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002327 case DataType::Type::kFloat32:
2328 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002329 locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002330 break;
2331
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002332 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002333 LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002334 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002335}
2336
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  // The register allocator already placed the return value in the ABI return
  // register (RAX or XMM0, see the LocationsBuilder); in debug builds, verify
  // that before exiting the frame.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case DataType::Type::kReference:
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
        break;

      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                  XMM0);
        break;

      default:
        LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}
2363
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002364Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002365 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002366 case DataType::Type::kReference:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002367 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002368 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002369 case DataType::Type::kInt8:
2370 case DataType::Type::kUint16:
2371 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -08002372 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002373 case DataType::Type::kInt32:
Aart Bik66c158e2018-01-31 12:55:04 -08002374 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002375 case DataType::Type::kInt64:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002376 return Location::RegisterLocation(RAX);
2377
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002378 case DataType::Type::kVoid:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002379 return Location::NoLocation();
2380
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002381 case DataType::Type::kFloat64:
2382 case DataType::Type::kFloat32:
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002383 return Location::FpuRegisterLocation(XMM0);
2384 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01002385
2386 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002387}
2388
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  // The callee ArtMethod* is passed in the dedicated method register.
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2392
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  // Returns the location of the next argument of the given type and advances
  // this visitor's cursors: gp_index_ (GP argument registers), float_index_
  // (FP argument registers) and stack_index_ (dex virtual-register slots,
  // counted even for register arguments so later stack slots line up).
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      // 32-bit integral / reference: one GP register or one stack slot.
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      // 64-bit integral: occupies two dex vreg slots but only one GP register
      // when passed in a register (hence the asymmetric gp_index_ bump).
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      // Single-precision FP: one FP register or one stack slot.
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      // Double-precision FP: one FP register, but two dex vreg slots.
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      // These types never appear as managed-ABI parameters.
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2451
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2458
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Defer to the shared runtime-call path for unresolved invokes.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2462
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // If the call maps to a recognized intrinsic, let the intrinsic builder set
  // up the locations instead of the generic invoke path.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2475
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002476static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2477 if (invoke->GetLocations()->Intrinsified()) {
2478 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2479 intrinsic.Dispatch(invoke);
2480 return true;
2481 }
2482 return false;
2483}
2484
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // Intrinsified invokes emit inline code instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  // The temp (when present) is used by the call lowering; pass kNoLocation
  // otherwise.
  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
}
2498
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  // Generic invoke lowering: place arguments per the dex calling convention.
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2503
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002504void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Mark Mendellfb8d2792015-03-31 22:16:59 -04002505 IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002506 if (intrinsic.TryDispatch(invoke)) {
2507 return;
2508 }
2509
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002510 HandleInvoke(invoke);
2511}
2512
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002513void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002514 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2515 return;
2516 }
2517
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002518 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002519 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01002520}
2521
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method's dex method index,
  // see the code generator); it is passed in RAX.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2527
void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(uint32_t dex_pc, CpuRegister klass) {
  // Baseline-JIT only: update the inline cache for the call site at `dex_pc`
  // with the receiver's class, which the caller must have loaded into RDI.
  DCHECK_EQ(RDI, klass.AsRegister());
  if (GetCompilerOptions().IsBaseline() && !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    // NOTE(review): assumes the method always has a ProfilingInfo when
    // compiled baseline — `info` is dereferenced unchecked; verify.
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    InlineCache* cache = info->GetInlineCache(dex_pc);
    uint64_t address = reinterpret_cast64<uint64_t>(cache);
    NearLabel done;
    // Load the cache address into TMP; the address is constant for this
    // compilation, so it can be embedded as an immediate.
    __ movq(CpuRegister(TMP), Immediate(address));
    // Fast path for a monomorphic cache.
    __ cmpl(Address(CpuRegister(TMP), InlineCache::ClassesOffset().Int32Value()), klass);
    __ j(kEqual, &done);
    // Slow path: call the runtime to record the new class in the cache.
    GenerateInvokeRuntime(
        GetThreadOffset<kX86_64PointerSize>(kQuickUpdateInlineCache).Int32Value());
    __ Bind(&done);
  }
}
2545
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Interface dispatch: load the receiver's class, look up the target in the
  // class's IMT, and call through its entry point.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  codegen_->MaybeGenerateInlineCacheCheck(invoke->GetDexPc(), temp);

  // Set the hidden argument. This is safe to do here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  // We also do it after MaybeGenerateInlineCacheCheck, which may use RAX.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
      Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the offset of the IMT entry for this interface method.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2595
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // invoke-polymorphic uses the generic invoke location setup.
  HandleInvoke(invoke);
}
2599
void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Defer to the shared lowering for invoke-polymorphic calls.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
2603
void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  // invoke-custom uses the generic invoke location setup.
  HandleInvoke(invoke);
}
2607
void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  // Defer to the shared lowering for invoke-custom calls.
  codegen_->GenerateInvokeCustomCall(invoke);
}
2611
Roland Levillain88cb1752014-10-20 16:36:47 +01002612void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
2613 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002614 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Roland Levillain88cb1752014-10-20 16:36:47 +01002615 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002616 case DataType::Type::kInt32:
2617 case DataType::Type::kInt64:
Roland Levillain88cb1752014-10-20 16:36:47 +01002618 locations->SetInAt(0, Location::RequiresRegister());
2619 locations->SetOut(Location::SameAsFirstInput());
2620 break;
2621
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002622 case DataType::Type::kFloat32:
2623 case DataType::Type::kFloat64:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002624 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002625 locations->SetOut(Location::SameAsFirstInput());
Roland Levillain5368c212014-11-27 15:03:41 +00002626 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002627 break;
2628
2629 default:
2630 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2631 }
2632}
2633
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  // Arithmetic negation, performed in place: neg{l,q} for integers, and
  // an XOR with the sign-bit mask for floating point.
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2677
Roland Levillaindff1f282014-11-05 14:15:05 +00002678void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2679 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002680 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002681 DataType::Type result_type = conversion->GetResultType();
2682 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002683 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2684 << input_type << " -> " << result_type;
David Brazdil46e2a392015-03-16 17:31:52 +00002685
Roland Levillaindff1f282014-11-05 14:15:05 +00002686 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002687 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002688 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002689 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002690 case DataType::Type::kInt16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002691 DCHECK(DataType::IsIntegralType(input_type)) << input_type;
2692 locations->SetInAt(0, Location::Any());
2693 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Roland Levillain01a8d712014-11-14 16:27:39 +00002694 break;
2695
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002696 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002697 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002698 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002699 locations->SetInAt(0, Location::Any());
2700 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2701 break;
2702
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002703 case DataType::Type::kFloat32:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002704 locations->SetInAt(0, Location::RequiresFpuRegister());
2705 locations->SetOut(Location::RequiresRegister());
Roland Levillain3f8f9362014-12-02 17:45:01 +00002706 break;
2707
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002708 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002709 locations->SetInAt(0, Location::RequiresFpuRegister());
2710 locations->SetOut(Location::RequiresRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002711 break;
2712
2713 default:
2714 LOG(FATAL) << "Unexpected type conversion from " << input_type
2715 << " to " << result_type;
2716 }
2717 break;
2718
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002719 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002720 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002721 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002722 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002723 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002724 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002725 case DataType::Type::kInt16:
2726 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002727 // TODO: We would benefit from a (to-be-implemented)
2728 // Location::RegisterOrStackSlot requirement for this input.
2729 locations->SetInAt(0, Location::RequiresRegister());
2730 locations->SetOut(Location::RequiresRegister());
2731 break;
2732
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002733 case DataType::Type::kFloat32:
Roland Levillain624279f2014-12-04 11:54:28 +00002734 locations->SetInAt(0, Location::RequiresFpuRegister());
2735 locations->SetOut(Location::RequiresRegister());
Roland Levillain624279f2014-12-04 11:54:28 +00002736 break;
2737
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002738 case DataType::Type::kFloat64:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002739 locations->SetInAt(0, Location::RequiresFpuRegister());
2740 locations->SetOut(Location::RequiresRegister());
Roland Levillaindff1f282014-11-05 14:15:05 +00002741 break;
2742
2743 default:
2744 LOG(FATAL) << "Unexpected type conversion from " << input_type
2745 << " to " << result_type;
2746 }
2747 break;
2748
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002749 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00002750 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002751 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002752 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002753 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002754 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002755 case DataType::Type::kInt16:
2756 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002757 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002758 locations->SetOut(Location::RequiresFpuRegister());
2759 break;
2760
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002761 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002762 locations->SetInAt(0, Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002763 locations->SetOut(Location::RequiresFpuRegister());
2764 break;
2765
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002766 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04002767 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002768 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002769 break;
2770
2771 default:
2772 LOG(FATAL) << "Unexpected type conversion from " << input_type
2773 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08002774 }
Roland Levillaincff13742014-11-17 14:32:17 +00002775 break;
2776
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002777 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00002778 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002779 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002780 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002781 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002782 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002783 case DataType::Type::kInt16:
2784 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04002785 locations->SetInAt(0, Location::Any());
Roland Levillaincff13742014-11-17 14:32:17 +00002786 locations->SetOut(Location::RequiresFpuRegister());
2787 break;
2788
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002789 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04002790 locations->SetInAt(0, Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002791 locations->SetOut(Location::RequiresFpuRegister());
2792 break;
2793
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002794 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04002795 locations->SetInAt(0, Location::Any());
Roland Levillain8964e2b2014-12-04 12:10:50 +00002796 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002797 break;
2798
2799 default:
2800 LOG(FATAL) << "Unexpected type conversion from " << input_type
2801 << " to " << result_type;
2802 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002803 break;
2804
2805 default:
2806 LOG(FATAL) << "Unexpected type conversion from " << input_type
2807 << " to " << result_type;
2808 }
2809}
2810
2811void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2812 LocationSummary* locations = conversion->GetLocations();
2813 Location out = locations->Out();
2814 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002815 DataType::Type result_type = conversion->GetResultType();
2816 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002817 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2818 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002819 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002820 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002821 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002822 case DataType::Type::kInt8:
2823 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002824 case DataType::Type::kInt16:
2825 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002826 case DataType::Type::kInt64:
2827 if (in.IsRegister()) {
2828 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2829 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2830 __ movzxb(out.AsRegister<CpuRegister>(),
2831 Address(CpuRegister(RSP), in.GetStackIndex()));
2832 } else {
2833 __ movl(out.AsRegister<CpuRegister>(),
2834 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2835 }
2836 break;
2837
2838 default:
2839 LOG(FATAL) << "Unexpected type conversion from " << input_type
2840 << " to " << result_type;
2841 }
2842 break;
2843
2844 case DataType::Type::kInt8:
2845 switch (input_type) {
2846 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002847 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002848 case DataType::Type::kInt16:
2849 case DataType::Type::kInt32:
2850 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002851 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002852 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002853 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002854 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002855 Address(CpuRegister(RSP), in.GetStackIndex()));
2856 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002857 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002858 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002859 }
2860 break;
2861
2862 default:
2863 LOG(FATAL) << "Unexpected type conversion from " << input_type
2864 << " to " << result_type;
2865 }
2866 break;
2867
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002868 case DataType::Type::kUint16:
2869 switch (input_type) {
2870 case DataType::Type::kInt8:
2871 case DataType::Type::kInt16:
2872 case DataType::Type::kInt32:
2873 case DataType::Type::kInt64:
2874 if (in.IsRegister()) {
2875 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2876 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2877 __ movzxw(out.AsRegister<CpuRegister>(),
2878 Address(CpuRegister(RSP), in.GetStackIndex()));
2879 } else {
2880 __ movl(out.AsRegister<CpuRegister>(),
2881 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2882 }
2883 break;
2884
2885 default:
2886 LOG(FATAL) << "Unexpected type conversion from " << input_type
2887 << " to " << result_type;
2888 }
2889 break;
2890
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002891 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002892 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002893 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002894 case DataType::Type::kInt32:
2895 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002896 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002897 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002898 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002899 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002900 Address(CpuRegister(RSP), in.GetStackIndex()));
2901 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002902 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002903 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002904 }
2905 break;
2906
2907 default:
2908 LOG(FATAL) << "Unexpected type conversion from " << input_type
2909 << " to " << result_type;
2910 }
2911 break;
2912
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002913 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002914 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002915 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002916 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002917 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002918 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002919 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002920 Address(CpuRegister(RSP), in.GetStackIndex()));
2921 } else {
2922 DCHECK(in.IsConstant());
2923 DCHECK(in.GetConstant()->IsLongConstant());
2924 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002925 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002926 }
2927 break;
2928
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002929 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002930 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2931 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002932 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002933
2934 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002935 // if input >= (float)INT_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07002936 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimIntMax)));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002937 __ j(kAboveEqual, &done);
2938 // if input == NaN goto nan
2939 __ j(kUnordered, &nan);
2940 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002941 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002942 __ jmp(&done);
2943 __ Bind(&nan);
2944 // output = 0
2945 __ xorl(output, output);
2946 __ Bind(&done);
2947 break;
2948 }
2949
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002950 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002951 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2952 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002953 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002954
2955 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002956 // if input >= (double)INT_MAX goto done
2957 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002958 __ j(kAboveEqual, &done);
2959 // if input == NaN goto nan
2960 __ j(kUnordered, &nan);
2961 // output = double-to-int-truncate(input)
2962 __ cvttsd2si(output, input);
2963 __ jmp(&done);
2964 __ Bind(&nan);
2965 // output = 0
2966 __ xorl(output, output);
2967 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002968 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002969 }
Roland Levillain946e1432014-11-11 17:35:19 +00002970
2971 default:
2972 LOG(FATAL) << "Unexpected type conversion from " << input_type
2973 << " to " << result_type;
2974 }
2975 break;
2976
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002977 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00002978 switch (input_type) {
2979 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002980 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002981 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002982 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002983 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002984 case DataType::Type::kInt16:
2985 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00002986 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00002987 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00002988 break;
2989
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002990 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00002991 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2992 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002993 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00002994
Mark Mendell92e83bf2015-05-07 11:25:03 -04002995 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04002996 // if input >= (float)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07002997 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimLongMax)));
Roland Levillain624279f2014-12-04 11:54:28 +00002998 __ j(kAboveEqual, &done);
2999 // if input == NaN goto nan
3000 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003001 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003002 __ cvttss2si(output, input, true);
3003 __ jmp(&done);
3004 __ Bind(&nan);
3005 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003006 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00003007 __ Bind(&done);
3008 break;
3009 }
3010
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003011 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003012 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3013 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003014 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003015
Mark Mendell92e83bf2015-05-07 11:25:03 -04003016 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003017 // if input >= (double)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003018 __ comisd(input, codegen_->LiteralDoubleAddress(
3019 static_cast<double>(kPrimLongMax)));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003020 __ j(kAboveEqual, &done);
3021 // if input == NaN goto nan
3022 __ j(kUnordered, &nan);
3023 // output = double-to-long-truncate(input)
3024 __ cvttsd2si(output, input, true);
3025 __ jmp(&done);
3026 __ Bind(&nan);
3027 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003028 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003029 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00003030 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003031 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003032
3033 default:
3034 LOG(FATAL) << "Unexpected type conversion from " << input_type
3035 << " to " << result_type;
3036 }
3037 break;
3038
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003039 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003040 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003041 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003042 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003043 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003044 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003045 case DataType::Type::kInt16:
3046 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003047 if (in.IsRegister()) {
3048 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3049 } else if (in.IsConstant()) {
3050 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3051 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003052 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003053 } else {
3054 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3055 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3056 }
Roland Levillaincff13742014-11-17 14:32:17 +00003057 break;
3058
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003059 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003060 if (in.IsRegister()) {
3061 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3062 } else if (in.IsConstant()) {
3063 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3064 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003065 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003066 } else {
3067 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3068 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3069 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003070 break;
3071
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003072 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003073 if (in.IsFpuRegister()) {
3074 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3075 } else if (in.IsConstant()) {
3076 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3077 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003078 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003079 } else {
3080 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3081 Address(CpuRegister(RSP), in.GetStackIndex()));
3082 }
Roland Levillaincff13742014-11-17 14:32:17 +00003083 break;
3084
3085 default:
3086 LOG(FATAL) << "Unexpected type conversion from " << input_type
3087 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003088 }
Roland Levillaincff13742014-11-17 14:32:17 +00003089 break;
3090
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003091 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003092 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003093 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003094 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003095 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003096 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003097 case DataType::Type::kInt16:
3098 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003099 if (in.IsRegister()) {
3100 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3101 } else if (in.IsConstant()) {
3102 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3103 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003104 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003105 } else {
3106 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3107 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3108 }
Roland Levillaincff13742014-11-17 14:32:17 +00003109 break;
3110
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003111 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003112 if (in.IsRegister()) {
3113 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3114 } else if (in.IsConstant()) {
3115 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3116 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003117 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003118 } else {
3119 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3120 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3121 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003122 break;
3123
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003124 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003125 if (in.IsFpuRegister()) {
3126 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3127 } else if (in.IsConstant()) {
3128 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3129 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003130 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003131 } else {
3132 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3133 Address(CpuRegister(RSP), in.GetStackIndex()));
3134 }
Roland Levillaincff13742014-11-17 14:32:17 +00003135 break;
3136
3137 default:
3138 LOG(FATAL) << "Unexpected type conversion from " << input_type
3139 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003140 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003141 break;
3142
3143 default:
3144 LOG(FATAL) << "Unexpected type conversion from " << input_type
3145 << " to " << result_type;
3146 }
3147}
3148
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003149void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003150 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003151 new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003152 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003153 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003154 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003155 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
3156 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003157 break;
3158 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003159
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003160 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003161 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell09b84632015-02-13 17:48:38 -05003162 // We can use a leaq or addq if the constant can fit in an immediate.
Mark Mendellea5af682015-10-22 17:35:49 -04003163 locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
Mark Mendell09b84632015-02-13 17:48:38 -05003164 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003165 break;
3166 }
3167
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003168 case DataType::Type::kFloat64:
3169 case DataType::Type::kFloat32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003170 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003171 locations->SetInAt(1, Location::Any());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003172 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003173 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003174 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003175
3176 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003177 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003178 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003179}
3180
// Emits x86-64 code for HAdd. For integral types the generator commutes the
// operands or uses lea as a three-operand, non-destructive add so that the
// result register need not coincide with either input; for floating-point
// types addss/addsd operate in place on the first input (the locations
// builder made the output alias input 0).
void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          // out == first: destructive two-operand add.
          __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          // out == second: addition commutes, so add first into out.
          __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          // out is distinct from both inputs: lea computes first + second
          // without clobbering either.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else if (second.IsConstant()) {
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addl(out.AsRegister<CpuRegister>(),
                  Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          // lea with a displacement: out = first + constant, first preserved.
          __ leal(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        // Memory operand: addl only supports the destructive form, so the
        // output must have been allocated on top of the first input.
        DCHECK(first.Equals(locations->Out()));
        __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        // Same commute/lea selection as the 32-bit case, with 64-bit forms.
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
        } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
        }
      } else {
        DCHECK(second.IsConstant());
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        int32_t int32_value = Low32Bits(value);
        // The locations builder only allowed constants that fit in 32 bits
        // (RegisterOrInt32Constant), so narrowing must be lossless here.
        DCHECK_EQ(int32_value, value);
        if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
          __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
        } else {
          __ leaq(out.AsRegister<CpuRegister>(), Address(
              first.AsRegister<CpuRegister>(), int32_value));
        }
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // In-place add into `first` (== out per the locations builder).
      if (second.IsFpuRegister()) {
        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand comes from a RIP-relative constant-pool entry.
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ addss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      // In-place add into `first` (== out per the locations builder).
      if (second.IsFpuRegister()) {
        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand comes from a RIP-relative constant-pool entry.
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ addsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3272
3273void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003274 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003275 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003276 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003277 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003278 locations->SetInAt(0, Location::RequiresRegister());
3279 locations->SetInAt(1, Location::Any());
3280 locations->SetOut(Location::SameAsFirstInput());
3281 break;
3282 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003283 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003284 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003285 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003286 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003287 break;
3288 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003289 case DataType::Type::kFloat32:
3290 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003291 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003292 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003293 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003294 break;
Calin Juravle11351682014-10-23 15:38:15 +01003295 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003296 default:
Calin Juravle11351682014-10-23 15:38:15 +01003297 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003298 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003299}
3300
// Emits code for an HSub. The matching LocationsBuilder tied the output to the
// first input (checked below), so every form below overwrites `first` in place.
void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case DataType::Type::kInt32: {
      // Right-hand side may be a register, an immediate, or a stack slot
      // (the builder allowed Location::Any()).
      if (second.IsRegister()) {
        __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ subl(first.AsRegister<CpuRegister>(), imm);
      } else {
        __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsConstant()) {
        // The builder used RegisterOrInt32Constant, so the constant is
        // guaranteed to fit in subq's 32-bit immediate field.
        int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
        DCHECK(IsInt<32>(value));
        __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
      } else {
        __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // Float constants are subtracted from the constant-area literal pool.
      if (second.IsFpuRegister()) {
        __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ subss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ subsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
3363
Calin Juravle34bacdf2014-10-07 20:23:36 +01003364void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3365 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003366 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003367 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003368 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003369 locations->SetInAt(0, Location::RequiresRegister());
3370 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003371 if (mul->InputAt(1)->IsIntConstant()) {
3372 // Can use 3 operand multiply.
3373 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3374 } else {
3375 locations->SetOut(Location::SameAsFirstInput());
3376 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003377 break;
3378 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003379 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003380 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003381 locations->SetInAt(1, Location::Any());
3382 if (mul->InputAt(1)->IsLongConstant() &&
3383 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003384 // Can use 3 operand multiply.
3385 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3386 } else {
3387 locations->SetOut(Location::SameAsFirstInput());
3388 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003389 break;
3390 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003391 case DataType::Type::kFloat32:
3392 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003393 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003394 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003395 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003396 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003397 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003398
3399 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003400 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003401 }
3402}
3403
// Emits code for an HMul. For integer multiplies with a constant right-hand
// side the three-operand imul form is used and `out` may differ from `first`;
// in every other form out == first (asserted case by case below).
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand imulq takes a sign-extended 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Constant operand is read from the constant-area literal pool.
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3487
// Pushes `source` onto the x87 FP stack. Values already on the CPU stack are
// loaded directly, with `stack_adjustment` compensating for extra space the
// caller has pushed since the stack indices were assigned. Values elsewhere
// (e.g. in an XMM register) are first spilled to the scratch slot at
// `temp_offset`, because x87 loads (flds/fldl) only take memory operands.
// `is_float` selects single- vs double-precision and is cross-checked against
// the kind of stack slot seen.
void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
                                                     uint32_t stack_adjustment, bool is_float) {
  if (source.IsStackSlot()) {
    DCHECK(is_float);
    __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else if (source.IsDoubleStackSlot()) {
    DCHECK(!is_float);
    __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
  } else {
    // Write the value to the temporary location on the stack and load to FP stack.
    if (is_float) {
      Location stack_temp = Location::StackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ flds(Address(CpuRegister(RSP), temp_offset));
    } else {
      Location stack_temp = Location::DoubleStackSlot(temp_offset);
      codegen_->Move(stack_temp, source);
      __ fldl(Address(CpuRegister(RSP), temp_offset));
    }
  }
}
3509
// Generates floating-point remainder for HRem using the x87 FPREM instruction
// (SSE has no remainder). Operands are staged through a temporary stack area,
// pushed onto the x87 stack, reduced iteratively, then the result is moved
// back into the output XMM register.
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // The divisor must be pushed first so the dividend ends up in ST(0).
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize. FPREM performs only partial argument
  // reduction per iteration for large exponent differences.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3562
// Fast path for integer div/rem when the constant divisor is +1 or -1:
// the remainder is always 0, and the quotient is the numerator, negated
// when dividing by -1. No idiv is emitted.
void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DCHECK(imm == 1 || imm == -1);

  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32: {
      if (instruction->IsRem()) {
        // n % (+/-)1 == 0.
        __ xorl(output_register, output_register);
      } else {
        __ movl(output_register, input_register);
        if (imm == -1) {
          __ negl(output_register);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (instruction->IsRem()) {
        // 32-bit xor suffices: writes to a 32-bit register zero-extend to 64 bits.
        __ xorl(output_register, output_register);
      } else {
        __ movq(output_register, input_register);
        if (imm == -1) {
          __ negq(output_register);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
  }
}
// Computes n % m for a constant m whose absolute value is a power of two,
// without idiv. Let mask = |m| - 1. The low bits n & mask give the magnitude;
// when n is negative and the masked value is non-zero, |m| is subtracted so
// the result carries the sign of the dividend (Java/ART rem semantics).
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    // Zero remainder needs no sign fix-up.
    __ j(Condition::kZero, &done);
    // tmp = out + ~(|m|-1) == out - |m|.
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    __ testl(numerator, numerator);
    // Select the adjusted value only when the numerator is negative.
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // Mask may not fit in a 32-bit immediate; materialize it in tmp.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // tmp = sign(numerator) << log2(|m|): all-ones high bits for a negative
    // numerator, so or-ing is equivalent to subtracting |m| from out.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
// Computes n / m for a constant m whose absolute value is a power of two,
// without idiv: bias the numerator by |m|-1 when it is negative (so the
// arithmetic shift rounds toward zero, matching idiv), shift right by
// log2(|m|), and negate for a negative divisor.
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator + |m|-1; keep the unbiased value for numerator >= 0.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    if (abs_imm == 2) {
      // rdx = numerator + sign bit, same add trick as the 32-bit path.
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      // |m|-1 may exceed a 32-bit immediate; load it then bias and select.
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3695
// Strength-reduces div/rem by an arbitrary non-trivial constant to a
// multiplication by a precomputed "magic" reciprocal plus shifts/corrections.
// Register constraints come from the imul semantics (results in RDX:RAX):
// input 0 is pinned to RAX, the quotient lands in RAX and the remainder in
// RDX, with a temp register preserving the original numerator.
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // Div keeps RDX as temp 0 and the numerator copy in temp 1; rem outputs in
  // RDX directly, so the numerator copy is temp 0.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator before clobbering EAX with the magic constant.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator (signed widening multiply).
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correct for the sign mismatch between imm and the magic constant.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // Add 1 when the intermediate quotient is negative (round toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm, left in EDX.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // quotient * imm; imulq immediates are 32-bit, else use the constant area.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3806
// Dispatches integer div/rem codegen. Constant divisors are strength-reduced
// (nothing for 0, copy/negate for +/-1, shifts/masks for powers of two, magic
// multiply otherwise); non-constant divisors use idiv with a slow path for the
// overflowing MIN_VALUE / -1 case, which would raise a hardware #DE-style
// exception rather than wrap.
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv reads the dividend from RDX:RAX; quotient -> RAX, remainder -> RDX.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3866
// Register constraints for HDiv. Integral division is pinned to the x86
// idiv register convention; floating-point division is computed in place
// on an XMM register.
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      // The idiv instruction reads the dividend from rdx:rax (edx:eax for
      // 32-bit), so RDX must be reserved as a temp.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // Constant divisors are lowered to a magic-number imul whose results are
      // forced into RAX and RDX, so an extra temp is needed to preserve the
      // numerator while those registers are clobbered.
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // divss/divsd overwrite their first operand, so the output aliases
      // input 0; the divisor may be a register, constant or stack slot.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3899
3900void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
3901 LocationSummary* locations = div->GetLocations();
3902 Location first = locations->InAt(0);
3903 Location second = locations->InAt(1);
3904 DCHECK(first.Equals(locations->Out()));
3905
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003906 DataType::Type type = div->GetResultType();
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003907 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003908 case DataType::Type::kInt32:
3909 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003910 GenerateDivRemIntegral(div);
Calin Juravled0d48522014-11-04 16:40:20 +00003911 break;
3912 }
3913
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003914 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003915 if (second.IsFpuRegister()) {
3916 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3917 } else if (second.IsConstant()) {
3918 __ divss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003919 codegen_->LiteralFloatAddress(
3920 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003921 } else {
3922 DCHECK(second.IsStackSlot());
3923 __ divss(first.AsFpuRegister<XmmRegister>(),
3924 Address(CpuRegister(RSP), second.GetStackIndex()));
3925 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003926 break;
3927 }
3928
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003929 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003930 if (second.IsFpuRegister()) {
3931 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3932 } else if (second.IsConstant()) {
3933 __ divsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003934 codegen_->LiteralDoubleAddress(
3935 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003936 } else {
3937 DCHECK(second.IsDoubleStackSlot());
3938 __ divsd(first.AsFpuRegister<XmmRegister>(),
3939 Address(CpuRegister(RSP), second.GetStackIndex()));
3940 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003941 break;
3942 }
3943
3944 default:
3945 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3946 }
3947}
3948
// Register constraints for HRem. Integral remainder uses the same idiv
// lowering as HDiv but takes its result from RDX; floating-point remainder
// is computed by GenerateRemFP.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // idiv takes the dividend in rdx:rax and leaves the remainder in rdx,
      // so RDX serves directly as the output here (no separate temp needed,
      // unlike HDiv where the quotient lands in RAX).
      locations->SetOut(Location::RegisterLocation(RDX));
      // Constant divisors are lowered to a magic-number imul whose results are
      // forced into RAX and RDX, so an extra temp is needed to preserve the
      // numerator while those registers are clobbered.
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      // RAX temp used by the FP remainder lowering -- see GenerateRemFP.
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3983
3984void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003985 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00003986 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003987 case DataType::Type::kInt32:
3988 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003989 GenerateDivRemIntegral(rem);
3990 break;
3991 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003992 case DataType::Type::kFloat32:
3993 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003994 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00003995 break;
3996 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003997 default:
3998 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
3999 }
4000}
4001
Aart Bik1f8d51b2018-02-15 10:42:37 -08004002static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4003 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4004 switch (minmax->GetResultType()) {
4005 case DataType::Type::kInt32:
4006 case DataType::Type::kInt64:
4007 locations->SetInAt(0, Location::RequiresRegister());
4008 locations->SetInAt(1, Location::RequiresRegister());
4009 locations->SetOut(Location::SameAsFirstInput());
4010 break;
4011 case DataType::Type::kFloat32:
4012 case DataType::Type::kFloat64:
4013 locations->SetInAt(0, Location::RequiresFpuRegister());
4014 locations->SetInAt(1, Location::RequiresFpuRegister());
4015 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
4016 // the second input to be the output (we can simply swap inputs).
4017 locations->SetOut(Location::SameAsFirstInput());
4018 break;
4019 default:
4020 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4021 }
4022}
4023
Aart Bik351df3e2018-03-07 11:54:57 -08004024void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
4025 bool is_min,
4026 DataType::Type type) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08004027 Location op1_loc = locations->InAt(0);
4028 Location op2_loc = locations->InAt(1);
4029
4030 // Shortcut for same input locations.
4031 if (op1_loc.Equals(op2_loc)) {
4032 // Can return immediately, as op1_loc == out_loc.
4033 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
4034 // a copy here.
4035 DCHECK(locations->Out().Equals(op1_loc));
4036 return;
4037 }
4038
4039 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4040 CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();
4041
4042 // (out := op1)
4043 // out <=? op2
4044 // if out is min jmp done
4045 // out := op2
4046 // done:
4047
4048 if (type == DataType::Type::kInt64) {
4049 __ cmpq(out, op2);
4050 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
4051 } else {
4052 DCHECK_EQ(type, DataType::Type::kInt32);
4053 __ cmpl(out, op2);
4054 __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
4055 }
4056}
4057
// Floating-point min/max with full NaN and signed-zero semantics.
// The output register aliases the first input on entry (see
// CreateMinMaxLocations), so the sequence compares in place and patches the
// result only when needed.
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations: the result is already in out.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if Nan jmp Nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  //  Nan_label:
  //  out := NaN
  //  op2_label:
  //  out := op2
  //  done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // ucomis sets the parity flag on an unordered compare, i.e. when either
  // operand is NaN.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0: the operands compared equal, which for distinct values
  // can only be +0.0 vs -0.0. OR-ing the bit patterns propagates the sign
  // bit so -0.0 wins for min; AND-ing clears it so +0.0 wins for max.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: the result is the canonical quiet NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4140
Aart Bik351df3e2018-03-07 11:54:57 -08004141void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4142 DataType::Type type = minmax->GetResultType();
4143 switch (type) {
4144 case DataType::Type::kInt32:
4145 case DataType::Type::kInt64:
4146 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4147 break;
4148 case DataType::Type::kFloat32:
4149 case DataType::Type::kFloat64:
4150 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4151 break;
4152 default:
4153 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4154 }
4155}
4156
// HMin/HMax share all their lowering; the visitors below only select the
// is_min polarity.
void LocationsBuilderX86_64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}

void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}

void LocationsBuilderX86_64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}

void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4172
Aart Bik3dad3412018-02-28 12:01:46 -08004173void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
4174 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4175 switch (abs->GetResultType()) {
4176 case DataType::Type::kInt32:
4177 case DataType::Type::kInt64:
4178 locations->SetInAt(0, Location::RequiresRegister());
4179 locations->SetOut(Location::SameAsFirstInput());
4180 locations->AddTemp(Location::RequiresRegister());
4181 break;
4182 case DataType::Type::kFloat32:
4183 case DataType::Type::kFloat64:
4184 locations->SetInAt(0, Location::RequiresFpuRegister());
4185 locations->SetOut(Location::SameAsFirstInput());
4186 locations->AddTemp(Location::RequiresFpuRegister());
4187 break;
4188 default:
4189 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4190 }
4191}
4192
4193void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
4194 LocationSummary* locations = abs->GetLocations();
4195 switch (abs->GetResultType()) {
4196 case DataType::Type::kInt32: {
4197 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4198 CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
4199 // Create mask.
4200 __ movl(mask, out);
4201 __ sarl(mask, Immediate(31));
4202 // Add mask.
4203 __ addl(out, mask);
4204 __ xorl(out, mask);
4205 break;
4206 }
4207 case DataType::Type::kInt64: {
4208 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
4209 CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
4210 // Create mask.
4211 __ movq(mask, out);
4212 __ sarq(mask, Immediate(63));
4213 // Add mask.
4214 __ addq(out, mask);
4215 __ xorq(out, mask);
4216 break;
4217 }
4218 case DataType::Type::kFloat32: {
4219 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4220 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4221 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
4222 __ andps(out, mask);
4223 break;
4224 }
4225 case DataType::Type::kFloat64: {
4226 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
4227 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4228 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
4229 __ andpd(out, mask);
4230 break;
4231 }
4232 default:
4233 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4234 }
4235}
4236
// HDivZeroCheck: throws ArithmeticException (via a slow path) when the
// divisor is zero. The divisor may live anywhere, since it is only tested.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}

void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  // Three emission strategies per type width:
  //  - register:   test reg, reg; jump to the slow path if zero.
  //  - stack slot: compare the slot against 0; jump to the slow path if zero.
  //  - constant:   resolved at compile time -- an unconditional jump for a
  //                zero divisor, no code at all for a non-zero one.
  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4290
Calin Juravle9aec02f2014-11-18 23:06:35 +00004291void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
4292 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4293
4294 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004295 new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004296
4297 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004298 case DataType::Type::kInt32:
4299 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004300 locations->SetInAt(0, Location::RequiresRegister());
4301 // The shift count needs to be in CL.
4302 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
4303 locations->SetOut(Location::SameAsFirstInput());
4304 break;
4305 }
4306 default:
4307 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
4308 }
4309}
4310
4311void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
4312 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
4313
4314 LocationSummary* locations = op->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004315 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004316 Location second = locations->InAt(1);
4317
4318 switch (op->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004319 case DataType::Type::kInt32: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004320 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004321 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004322 if (op->IsShl()) {
4323 __ shll(first_reg, second_reg);
4324 } else if (op->IsShr()) {
4325 __ sarl(first_reg, second_reg);
4326 } else {
4327 __ shrl(first_reg, second_reg);
4328 }
4329 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004330 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004331 if (op->IsShl()) {
4332 __ shll(first_reg, imm);
4333 } else if (op->IsShr()) {
4334 __ sarl(first_reg, imm);
4335 } else {
4336 __ shrl(first_reg, imm);
4337 }
4338 }
4339 break;
4340 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004341 case DataType::Type::kInt64: {
Calin Juravle9aec02f2014-11-18 23:06:35 +00004342 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00004343 CpuRegister second_reg = second.AsRegister<CpuRegister>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004344 if (op->IsShl()) {
4345 __ shlq(first_reg, second_reg);
4346 } else if (op->IsShr()) {
4347 __ sarq(first_reg, second_reg);
4348 } else {
4349 __ shrq(first_reg, second_reg);
4350 }
4351 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004352 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Calin Juravle9aec02f2014-11-18 23:06:35 +00004353 if (op->IsShl()) {
4354 __ shlq(first_reg, imm);
4355 } else if (op->IsShr()) {
4356 __ sarq(first_reg, imm);
4357 } else {
4358 __ shrq(first_reg, imm);
4359 }
4360 }
4361 break;
4362 }
4363 default:
4364 LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
Vladimir Marko351dddf2015-12-11 16:34:46 +00004365 UNREACHABLE();
Calin Juravle9aec02f2014-11-18 23:06:35 +00004366 }
4367}
4368
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004369void LocationsBuilderX86_64::VisitRor(HRor* ror) {
4370 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004371 new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004372
4373 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004374 case DataType::Type::kInt32:
4375 case DataType::Type::kInt64: {
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004376 locations->SetInAt(0, Location::RequiresRegister());
4377 // The shift count needs to be in CL (unless it is a constant).
4378 locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
4379 locations->SetOut(Location::SameAsFirstInput());
4380 break;
4381 }
4382 default:
4383 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4384 UNREACHABLE();
4385 }
4386}
4387
4388void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
4389 LocationSummary* locations = ror->GetLocations();
4390 CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
4391 Location second = locations->InAt(1);
4392
4393 switch (ror->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004394 case DataType::Type::kInt32:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004395 if (second.IsRegister()) {
4396 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4397 __ rorl(first_reg, second_reg);
4398 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004399 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004400 __ rorl(first_reg, imm);
4401 }
4402 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004403 case DataType::Type::kInt64:
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004404 if (second.IsRegister()) {
4405 CpuRegister second_reg = second.AsRegister<CpuRegister>();
4406 __ rorq(first_reg, second_reg);
4407 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004408 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004409 __ rorq(first_reg, imm);
4410 }
4411 break;
4412 default:
4413 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4414 UNREACHABLE();
4415 }
4416}
4417
// The three shift instructions share all of their lowering via HandleShift;
// the visitors below are pure dispatch.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4441
// HNewInstance: object allocation is performed by a runtime entrypoint, so
// the argument goes in the first runtime-call register and the result comes
// back in RAX.
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
}

void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  // The entrypoint is chosen per-instruction (see HNewInstance::GetEntrypoint).
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}
4455
// HNewArray: array allocation is performed by a runtime entrypoint taking
// the class and length in the first two runtime-call registers; the new
// array reference is returned in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4472
// HParameterValue: parameters arrive wherever the calling convention placed
// them; no code is emitted, only the location is recorded.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  // Stack-passed parameters were addressed relative to the caller's frame;
  // rebase the slot onto this method's frame by adding the frame size.
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

// HCurrentMethod: the current ArtMethod* lives in the fixed method register
// on entry; again no code is needed.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4500
// HClassTableGet: loads a method pointer out of a class's dispatch tables.
// Input 0 is the mirror::Class reference; the output is the ArtMethod*.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // The vtable is embedded in the class object, so a single load at the
    // entry's offset suffices.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    // The IMT is referenced through a pointer in the class: first load the
    // ImTable*, then load the entry at the element's offset.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4525
// Register constraints for HNot: the bitwise complement is computed in place.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4532
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004533void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
4534 LocationSummary* locations = not_->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004535 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4536 locations->Out().AsRegister<CpuRegister>().AsRegister());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004537 Location out = locations->Out();
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004538 switch (not_->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004539 case DataType::Type::kInt32:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004540 __ notl(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004541 break;
4542
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004543 case DataType::Type::kInt64:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004544 __ notq(out.AsRegister<CpuRegister>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004545 break;
4546
4547 default:
4548 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4549 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004550}
4551
David Brazdil66d126e2015-04-03 16:02:44 +01004552void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4553 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004554 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004555 locations->SetInAt(0, Location::RequiresRegister());
4556 locations->SetOut(Location::SameAsFirstInput());
4557}
4558
4559void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004560 LocationSummary* locations = bool_not->GetLocations();
4561 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4562 locations->Out().AsRegister<CpuRegister>().AsRegister());
4563 Location out = locations->Out();
4564 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4565}
4566
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004567void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004568 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004569 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004570 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004571 locations->SetInAt(i, Location::Any());
4572 }
4573 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004574}
4575
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are not expected to survive to code generation (presumably resolved
  // into moves by earlier phases) — reaching this visitor is a compiler bug.
  LOG(FATAL) << "Unimplemented";
}
4579
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004580void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004581 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004582 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004583 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004584 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4585 */
4586 switch (kind) {
4587 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004588 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004589 break;
4590 }
4591 case MemBarrierKind::kAnyStore:
4592 case MemBarrierKind::kLoadAny:
4593 case MemBarrierKind::kStoreStore: {
4594 // nop
4595 break;
4596 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004597 case MemBarrierKind::kNTStoreStore:
4598 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004599 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004600 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004601 }
4602}
4603
4604void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4605 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4606
Roland Levillain0d5a2812015-11-13 10:07:31 +00004607 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004608 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004609 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004610 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
4611 object_field_get_with_read_barrier
4612 ? LocationSummary::kCallOnSlowPath
4613 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004614 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004615 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004616 }
Calin Juravle52c48962014-12-16 17:02:57 +00004617 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004618 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004619 locations->SetOut(Location::RequiresFpuRegister());
4620 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004621 // The output overlaps for an object field get when read barriers
4622 // are enabled: we do not want the move to overwrite the object's
4623 // location, as we need it to emit the read barrier.
4624 locations->SetOut(
4625 Location::RequiresRegister(),
4626 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004627 }
Calin Juravle52c48962014-12-16 17:02:57 +00004628}
4629
// Emits the load for an instance or static field get. Handles zero/sign
// extension per type, volatile load-acquire barriers, implicit null checks,
// and (for references) read barriers.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (load_type) {
    // Sub-word types: movzx for unsigned (zero-extend), movsx for signed.
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check= */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        // The null check must be recorded right after the faulting load.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      // Volatile load: emit the LoadAny (acquire-like) barrier after the load.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4732
4733void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4734 const FieldInfo& field_info) {
4735 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4736
4737 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004738 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004739 DataType::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004740 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004741 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004742 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004743
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004744 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004745 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004746 if (is_volatile) {
4747 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4748 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4749 } else {
4750 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4751 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004752 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004753 if (is_volatile) {
4754 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4755 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4756 } else {
4757 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4758 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004759 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004760 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004761 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004762 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004763 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004764 } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01004765 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004766 locations->AddTemp(Location::RequiresRegister());
4767 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004768}
4769
// Emits the store for an instance or static field set. Handles constant and
// register values, volatile barriers (AnyStore before, AnyAny after), heap
// reference poisoning, the card-marking write barrier, and implicit null
// check recording.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set when the store helper records the implicit null check itself (64-bit
  // constant stores via MoveInt64ToAddress), so we don't record it twice.
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(Address(base, offset),
                Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(Address(base, offset),
                Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == DataType::Type::kReference` implies `v == 0`.
        DCHECK((field_type != DataType::Type::kReference) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
          // Poison the reference in a temp so the value register keeps the
          // unpoisoned reference for the write barrier below.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    // Card-marking write barrier; value_can_be_null is forwarded so
    // MarkGCCard can decide whether to test the stored value for null.
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4895
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field stores share HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4899
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // GetValueCanBeNull() is forwarded to the card-marking code in HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4903
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field loads share HandleFieldGet.
  HandleFieldGet(instruction);
}
4907
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the shared field-load emitter.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004911
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads use the same locations as instance field loads.
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004915
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Delegate to the shared field-load emitter.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004919
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field stores use the same locations as instance field stores.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004923
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() is forwarded to the card-marking code in HandleFieldSet.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4927
void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  // The append is implemented as a runtime call; its result is returned in RAX.
  codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(RAX));
}
4931
void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  // Pass the format descriptor in RDI (first argument register of the x86-64
  // calling convention), then call the StringBuilder-append entrypoint.
  __ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
  codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
}
4936
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Field not resolved at compile time: set up locations for a runtime-assisted
  // access using the field-access calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4943
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Emit the runtime-assisted read of the unresolved instance field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4953
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Field not resolved at compile time: set up locations for a runtime-assisted
  // access using the field-access calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4960
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Emit the runtime-assisted write of the unresolved instance field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4970
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Field not resolved at compile time: set up locations for a runtime-assisted
  // access using the field-access calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4977
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Emit the runtime-assisted read of the unresolved static field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4987
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Field not resolved at compile time: set up locations for a runtime-assisted
  // access using the field-access calling convention.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4994
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Emit the runtime-assisted write of the unresolved static field.
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5004
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005005void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005006 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5007 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5008 ? Location::RequiresRegister()
5009 : Location::Any();
5010 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005011}
5012
// Emits an implicit null check: a one-word memory probe of the object that
// faults when the reference is null (the fault is presumably converted to a
// NullPointerException by the runtime's fault handler — confirm in runtime).
void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    // A subsequent user instruction's own memory access serves as the check.
    return;
  }
  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  // testl only reads memory; RAX is not modified, it is just a dummy operand.
  __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
  // Record the PC so a fault here can be mapped back to this instruction.
  RecordPcInfo(instruction, instruction->GetDexPc());
}
5023
// Emits an explicit null test on the object, branching to the null-check
// slow path when it is null.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF iff the reference is null.
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // The only constant input allowed here is the null constant, so the
    // slow path is taken unconditionally.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
5043
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  // GenerateNullCheck dispatches to GenerateImplicitNullCheck or
  // GenerateExplicitNullCheck (presumably based on the compiler options).
  codegen_->GenerateNullCheck(instruction);
}
5047
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005048void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005049 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005050 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005051 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005052 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5053 object_array_get_with_read_barrier
5054 ? LocationSummary::kCallOnSlowPath
5055 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005056 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005057 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005058 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005059 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005060 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005061 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005062 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5063 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005064 // The output overlaps for an object array get when read barriers
5065 // are enabled: we do not want the move to overwrite the array's
5066 // location, as we need it to emit the read barrier.
5067 locations->SetOut(
5068 Location::RequiresRegister(),
5069 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005070 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005071}
5072
5073void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
5074 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005075 Location obj_loc = locations->InAt(0);
5076 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005077 Location index = locations->InAt(1);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005078 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005079 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005080
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005081 DataType::Type type = instruction->GetType();
Roland Levillain4d027112015-07-01 15:41:14 +01005082 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005083 case DataType::Type::kBool:
5084 case DataType::Type::kUint8: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005085 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005086 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005087 break;
5088 }
5089
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005090 case DataType::Type::kInt8: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005091 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005092 __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005093 break;
5094 }
5095
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005096 case DataType::Type::kUint16: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005097 CpuRegister out = out_loc.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07005098 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5099 // Branch cases into compressed and uncompressed for each index's type.
5100 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5101 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00005102 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005103 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005104 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5105 "Expecting 0=compressed, 1=uncompressed");
5106 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005107 __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
5108 __ jmp(&done);
5109 __ Bind(&not_compressed);
5110 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
5111 __ Bind(&done);
5112 } else {
5113 __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
5114 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005115 break;
5116 }
5117
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005118 case DataType::Type::kInt16: {
5119 CpuRegister out = out_loc.AsRegister<CpuRegister>();
5120 __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
5121 break;
5122 }
5123
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005124 case DataType::Type::kInt32: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005125 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005126 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005127 break;
5128 }
5129
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005130 case DataType::Type::kReference: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005131 static_assert(
5132 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5133 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005134 // /* HeapReference<Object> */ out =
5135 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5136 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005137 // Note that a potential implicit null check is handled in this
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005138 // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005139 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08005140 instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005141 } else {
5142 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005143 __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
5144 codegen_->MaybeRecordImplicitNullCheck(instruction);
5145 // If read barriers are enabled, emit read barriers other than
5146 // Baker's using a slow path (and also unpoison the loaded
5147 // reference, if heap poisoning is enabled).
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005148 if (index.IsConstant()) {
5149 uint32_t offset =
5150 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005151 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5152 } else {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005153 codegen_->MaybeGenerateReadBarrierSlow(
5154 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5155 }
5156 }
5157 break;
5158 }
5159
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005160 case DataType::Type::kInt64: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005161 CpuRegister out = out_loc.AsRegister<CpuRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005162 __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005163 break;
5164 }
5165
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005166 case DataType::Type::kFloat32: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005167 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005168 __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005169 break;
5170 }
5171
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005172 case DataType::Type::kFloat64: {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005173 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005174 __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01005175 break;
5176 }
5177
Aart Bik66c158e2018-01-31 12:55:04 -08005178 case DataType::Type::kUint32:
5179 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005180 case DataType::Type::kVoid:
Roland Levillain4d027112015-07-01 15:41:14 +01005181 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005182 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005183 }
Roland Levillain4d027112015-07-01 15:41:14 +01005184
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005185 if (type == DataType::Type::kReference) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00005186 // Potential implicit null checks, in the case of reference
5187 // arrays, are handled in the previous switch statement.
5188 } else {
5189 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01005190 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005191}
5192
5193void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005194 DataType::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005195
5196 bool needs_write_barrier =
5197 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005198 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005199
Vladimir Markoca6fff82017-10-03 14:49:14 +01005200 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005201 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005202 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005203
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005204 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04005205 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005206 if (DataType::IsFloatingPointType(value_type)) {
Mark Mendellea5af682015-10-22 17:35:49 -04005207 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005208 } else {
5209 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5210 }
5211
5212 if (needs_write_barrier) {
5213 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01005214 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005215 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005216 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005217}
5218
// Generates code for an array store.  Primitive types are a single store to
// the element address (the first memory access doubles as an implicit null
// check); reference stores additionally perform an optional type check via a
// slow path, mark the GC card, and poison the stored reference when heap
// poisoning is enabled.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      // The store above is the first access to `array`; it serves as the null check.
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check or write barrier is ever needed.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();

      // A null value skips both the type check and the card marking.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers.  This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] arrays, a value whose class is exactly the component
          // type (or whose class directly extends Object) can be stored
          // without the slow path.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Mark the card for `array` unconditionally on this (non-null value)
      // path; null stores jumped directly to `do_store` below.
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      Location source = value;
      if (kPoisonHeapReferences) {
        // Poison a copy of the reference in `temp`; `register_value` must
        // stay intact in case the slow path is taken.
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp_loc;
      }

      __ movl(address, source.AsRegister<CpuRegister>());

      if (can_value_be_null || !needs_type_check) {
        // Otherwise the type-check sequence above already performed the
        // first access to `array` and recorded the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may not fit in a single movq; MoveInt64ToAddress
        // handles the split (and records the implicit null check itself).
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        // Store the float constant as its raw 32-bit pattern.
        DCHECK(value.IsConstant());
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the double constant as its raw 64-bit pattern; see kInt64.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5428
5429void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005430 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005431 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005432 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005433 if (!instruction->IsEmittedAtUseSite()) {
5434 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5435 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005436}
5437
5438void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005439 if (instruction->IsEmittedAtUseSite()) {
5440 return;
5441 }
5442
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005443 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005444 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005445 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5446 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005447 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005448 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005449 // Mask out most significant bit in case the array is String's array of char.
5450 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005451 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005452 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005453}
5454
5455void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005456 RegisterSet caller_saves = RegisterSet::Empty();
5457 InvokeRuntimeCallingConvention calling_convention;
5458 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5459 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5460 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005461 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005462 HInstruction* length = instruction->InputAt(1);
5463 if (!length->IsEmittedAtUseSite()) {
5464 locations->SetInAt(1, Location::RegisterOrConstant(length));
5465 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005466}
5467
// Generates an array bounds check: jumps to a throwing slow path when the
// index is not in [0, length).  Negative indices are caught by treating the
// comparison as unsigned (kAboveEqual / kBelowEqual).
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: always throw.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    // Unsigned compare: a negative index appears as a huge unsigned value.
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        // Shift out the compression flag stored in the LSB of the count field.
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        // The compare against the in-memory length is the first access to the
        // array and serves as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // length <= index (unsigned) => out of bounds.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5529
// Marks the card-table entry for `object` dirty, recording that a reference
// (`value`) was stored into it so the concurrent GC rescans it.  `temp` and
// `card` are scratch registers.  When `value_can_be_null` is true, marking is
// skipped entirely for a null `value`.
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the address of the card table into `card`.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip= */ true));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5565
void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are inserted after register allocation, so they never go
  // through location building; reaching this visitor is a compiler bug.
  LOG(FATAL) << "Unimplemented";
}
5569
5570void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005571 if (instruction->GetNext()->IsSuspendCheck() &&
5572 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5573 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5574 // The back edge will generate the suspend check.
5575 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5576 }
5577
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005578 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5579}
5580
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005581void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005582 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5583 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005584 // In suspend check slow path, usually there are no caller-save registers at all.
5585 // If SIMD instructions are present, however, we force spilling all live SIMD
5586 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005587 locations->SetCustomSlowPathCallerSaves(
5588 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005589}
5590
5591void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005592 HBasicBlock* block = instruction->GetBlock();
5593 if (block->GetLoopInformation() != nullptr) {
5594 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5595 // The back edge will generate the suspend check.
5596 return;
5597 }
5598 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5599 // The goto will generate the suspend check.
5600 return;
5601 }
5602 GenerateSuspendCheck(instruction, nullptr);
5603}
5604
// Emits a suspend check: tests the thread's flags word (via the GS segment)
// and enters a (cached, per-instruction) slow path when a suspend is
// requested.  When `successor` is non-null this is a back-edge check and the
// fast path jumps straight to the successor block.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    // First emission for this instruction: create and register the slow path.
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // Reused slow path must target the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Standalone check: fall through when no flags are set.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: jump to the successor when no flags are set.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5632
// Parallel moves are emitted directly into the code generator's main
// instruction stream, so expose its assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5636
// Emits one resolved move of the parallel-move at `index`. Dispatches on the
// (source, destination) location kinds: core register, FPU register, 32-bit /
// 64-bit / SIMD stack slot, or constant. Memory-to-memory moves go through
// the TMP scratch register.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      // 32-bit spill.
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      // 64-bit spill.
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack 32-bit move via TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      // Stack-to-stack 64-bit move via TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      // 128-bit stack-to-stack move as two 64-bit moves via TMP.
      size_t high = kX86_64WordSize;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is shorter than mov imm and has no false dependency.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      // Full 128-bit vector spill.
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
}
5749
// Swaps a 32-bit value between a core register and a stack slot,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5755
// Swaps two 64-bit core registers via TMP.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5761
// Swaps a 64-bit value between a core register and a stack slot,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5767
// Swaps a 32-bit value between an XMM register and a stack slot,
// staging the old stack value in TMP.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5773
// Swaps a 64-bit value between an XMM register and a stack slot,
// staging the old stack value in TMP.
// NOTE(review): the final movd must transfer all 64 bits (REX.W form) to be
// correct here — presumably the ART assembler's movd(Xmm, Cpu) does so;
// confirm against the x86_64 assembler implementation.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5779
// Swaps a 128-bit value between an XMM register and a SIMD stack slot.
// Spills the register into a freshly reserved area below RSP, swaps the two
// memory regions, then reloads. `mem` is rebased by `extra_slot` because the
// sub of RSP shifted all existing stack offsets.
void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
  size_t extra_slot = 2 * kX86_64WordSize;
  __ subq(CpuRegister(RSP), Immediate(extra_slot));
  __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
  ExchangeMemory64(0, mem + extra_slot, 2);
  __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
  __ addq(CpuRegister(RSP), Immediate(extra_slot));
}
5788
// Swaps two 32-bit stack slots using TMP plus a second scratch register.
// If the scratch register had to be spilled (pushed), every stack offset is
// shifted by one word to compensate for the push.
void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}
5801
// Swaps `num_of_qwords` consecutive 64-bit words between two stack regions,
// one qword per iteration, using TMP plus a second scratch register.
// As in ExchangeMemory32, offsets are shifted if the scratch was spilled.
void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;

  // Now that temp registers are available (possibly spilled), exchange blocks of memory.
  for (int i = 0; i < num_of_qwords; i++) {
    __ movq(CpuRegister(TMP),
            Address(CpuRegister(RSP), mem1 + stack_offset));
    __ movq(CpuRegister(ensure_scratch.GetRegister()),
            Address(CpuRegister(RSP), mem2 + stack_offset));
    __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
            CpuRegister(TMP));
    __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
            CpuRegister(ensure_scratch.GetRegister()));
    // Advance to the next qword of both regions.
    stack_offset += kX86_64WordSize;
  }
}
5821
// Emits one swap of the parallel-move at `index`, dispatching on the
// (source, destination) location kinds to the appropriate Exchange helper.
// Unsupported combinations are a fatal error (they should have been resolved
// into moves earlier).
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM swap via the core TMP register.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5863
5864
// Spills a core register so it can be used as an extra scratch register
// during parallel-move resolution.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5868
5869
// Restores a core register previously spilled by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5873
// Emits a class-initialization check on the class in `class_reg`: compares
// the byte of the status word holding the ClassStatus bits against the
// "visibly initialized" value and enters `slow_path` for any lower status.
// The slow path's exit label is bound here, so the caller need not bind it.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  // The status bits live above the SubtypeCheckBits inside the 32-bit
  // status_ field; address the single byte that contains them.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  // Any status below kVisiblyInitialized requires the runtime slow path.
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5886
// Emits a bitstring type check: compares the path-to-root bits stored in the
// class status word of the class in `temp` against `check`'s expected
// bitstring. Leaves the result in the flags (equal iff the type matches);
// for the wide path `temp` is clobbered.
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  // The mask must cover a contiguous run of low bits.
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Exactly 16 bits: compare the bitstring directly in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
5906
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005907HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
5908 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005909 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005910 case HLoadClass::LoadKind::kInvalid:
5911 LOG(FATAL) << "UNREACHABLE";
5912 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005913 case HLoadClass::LoadKind::kReferrersClass:
5914 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005915 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005916 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005917 case HLoadClass::LoadKind::kBssEntry:
5918 DCHECK(!Runtime::Current()->UseJitCompilation());
5919 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005920 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005921 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005922 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005923 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005924 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005925 break;
5926 }
5927 return desired_class_load_kind;
5928}
5929
// Builds the LocationSummary for an HLoadClass according to its load kind:
// runtime-call kinds use a fixed RAX convention, others produce the class in
// any register, with slow-path caller-save sets tailored to the kind.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // A read barrier (outside the boot image) may need a slow path even when
  // the instruction itself needs no environment.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Input 0 is the current ArtMethod.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
5964
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005965Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01005966 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005967 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005968 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005969 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005970 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005971 PatchInfo<Label>* info = &jit_class_patches_.back();
5972 return &info->label;
5973}
5974
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the code that materializes the class reference for an HLoadClass,
// dispatching on the load kind, and appends the null-check / clinit-check
// slow path when required.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  // Boot-image classes are never moved, so they never need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // PC-relative lea; the dummy offset is patched at link time.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the boot-image address from the .data.bimg.rel.ro entry.
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86-64 memory model.
      // The BSS entry may still be null (unresolved class).
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // Binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6067
// Builds the LocationSummary for an HClinitCheck: takes the class in any
// register, passes it through as the output when used, and always has a
// slow path for the actual initialization.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6078
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006079void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6080 // Custom calling convention: RAX serves as both input and output.
6081 Location location = Location::RegisterLocation(RAX);
6082 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6083}
6084
// Delegates to the architecture-independent runtime call for loading a
// MethodHandle.
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6088
Orion Hodson18259d72018-04-12 11:18:23 +01006089void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
6090 // Custom calling convention: RAX serves as both input and output.
6091 Location location = Location::RegisterLocation(RAX);
6092 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6093}
6094
// Delegates to the architecture-independent runtime call for loading a
// MethodType.
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6098
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006099void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006100 // We assume the class to not be null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006101 SlowPathCode* slow_path =
6102 new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006103 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006104 GenerateClassInitializationCheck(slow_path,
6105 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006106}
6107
// x86-64 supports every HLoadString load kind, so the desired kind is
// returned unchanged; the DCHECKs only validate that AOT-only and JIT-only
// kinds are requested in the matching compilation mode.
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      // Link-time/PC-relative kinds are only meaningful for AOT compilation.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      // Direct-address kinds rely on runtime state and are JIT-only.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      // Always valid: resolve the string through the runtime.
      break;
  }
  return desired_string_load_kind;
}
6125
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006126void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006127 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006128 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006129 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07006130 locations->SetOut(Location::RegisterLocation(RAX));
6131 } else {
6132 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006133 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
6134 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006135 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006136 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006137 } else {
6138 // For non-Baker read barrier we have a temp-clobbering call.
6139 }
6140 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006141 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006142}
6143
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006144Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006145 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006146 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006147 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006148 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006149 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006150 PatchInfo<Label>* info = &jit_string_patches_.back();
6151 return &info->label;
6152}
6153
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the load of a java.lang.String according to the load kind selected at
// locations-build time. kDummy32BitOffset is a placeholder displacement; the
// real value is filled in via the patch recorded alongside each emission.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // PC-relative address of a boot-image string, resolved at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      // Load the string reference from the .data.bimg.rel.ro entry.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Load from the .bss entry; a null result means the string is not yet
      // resolved and we fall into the resolution slow path.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // No need for memory fence, thanks to the x86-64 memory model.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      // The string lives in the boot image at a known 32-bit address.
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      // Load through the JIT root table entry created for this string.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // Fallback (kRuntimeCall): resolve the string through the runtime.
  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6216
David Brazdilcb1c0552015-08-04 16:22:25 +01006217static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006218 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08006219 /* no_rip= */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01006220}
6221
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006222void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
6223 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006224 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006225 locations->SetOut(Location::RequiresRegister());
6226}
6227
void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
  // Read the thread-local pending-exception slot via the GS segment.
  __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
}
6231
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, no outputs, no call: just register an empty location summary.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6235
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store null into the thread-local pending-exception slot (GS-based TLS).
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6239
6240void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006241 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6242 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006243 InvokeRuntimeCallingConvention calling_convention;
6244 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6245}
6246
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Delegate exception delivery to the kQuickDeliverException entrypoint.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6251
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006252// Temp is used for read barrier.
6253static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6254 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006255 !kUseBakerReadBarrier &&
6256 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006257 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006258 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6259 return 1;
6260 }
6261 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006262}
6263
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006264// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6265// interface pointer, the current interface is compared in memory.
6266// The other checks have one temp for loading the object's class.
6267static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6268 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6269 return 2;
6270 }
6271 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006272}
6273
// Builds the location summary for HInstanceOf: decides whether the check can
// complete inline, may branch to a slow path, and which inputs/temps it needs.
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These kinds only need a slow path when a read barrier is required.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These kinds always resolve failures (or everything) on the slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      // Bitstring comparison is fully inline: kNoCall.
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // Inputs 1-3 are the constant bitstring path/mask operands.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
6314
// Generates code for HInstanceOf: sets `out` to 1 if `obj` is an instance of
// the class in `cls`, 0 otherwise. The inline sequence depends on the static
// TypeCheckKind; some kinds defer to TypeCheckSlowPathX86_64. The `zero` and
// `done` labels are only bound if some path actually linked to them.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // Single class comparison: obj->klass_ == cls.
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out);
      if (zero.IsLinked()) {
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6570
// Builds the location summary for HCheckCast. The call kind (fatal slow path
// vs. resumable) is decided by the shared CodeGenerator helper.
void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
    // Require a register for the interface check since there is a loop that compares the class to
    // a memory address.
    locations->SetInAt(1, Location::RequiresRegister());
  } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // Inputs 1-3 are the constant bitstring path/mask operands.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}
6591
6592void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006593 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006594 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006595 Location obj_loc = locations->InAt(0);
6596 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006597 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006598 Location temp_loc = locations->GetTemp(0);
6599 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006600 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6601 DCHECK_GE(num_temps, 1u);
6602 DCHECK_LE(num_temps, 2u);
6603 Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006604 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6605 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6606 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6607 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6608 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6609 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006610 const uint32_t object_array_data_offset =
6611 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006612
Vladimir Marko87584542017-12-12 17:47:52 +00006613 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006614 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006615 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6616 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006617 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006618
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006619
6620 NearLabel done;
6621 // Avoid null check if we know obj is not null.
6622 if (instruction->MustDoNullCheck()) {
6623 __ testl(obj, obj);
6624 __ j(kEqual, &done);
6625 }
6626
Roland Levillain0d5a2812015-11-13 10:07:31 +00006627 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006628 case TypeCheckKind::kExactCheck:
6629 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006630 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006631 GenerateReferenceLoadTwoRegisters(instruction,
6632 temp_loc,
6633 obj_loc,
6634 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006635 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006636 if (cls.IsRegister()) {
6637 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6638 } else {
6639 DCHECK(cls.IsStackSlot()) << cls;
6640 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6641 }
6642 // Jump to slow path for throwing the exception or doing a
6643 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006644 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006645 break;
6646 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006647
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006648 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006649 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006650 GenerateReferenceLoadTwoRegisters(instruction,
6651 temp_loc,
6652 obj_loc,
6653 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006654 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006655 // If the class is abstract, we eagerly fetch the super class of the
6656 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006657 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006658 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006659 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006660 GenerateReferenceLoadOneRegister(instruction,
6661 temp_loc,
6662 super_offset,
6663 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006664 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006665
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006666 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6667 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006668 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006669 // Otherwise, compare the classes.
6670 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006671 if (cls.IsRegister()) {
6672 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6673 } else {
6674 DCHECK(cls.IsStackSlot()) << cls;
6675 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6676 }
6677 __ j(kNotEqual, &loop);
6678 break;
6679 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006680
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006681 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006682 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006683 GenerateReferenceLoadTwoRegisters(instruction,
6684 temp_loc,
6685 obj_loc,
6686 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006687 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006688 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006689 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006690 __ Bind(&loop);
6691 if (cls.IsRegister()) {
6692 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6693 } else {
6694 DCHECK(cls.IsStackSlot()) << cls;
6695 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6696 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006697 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006698
Roland Levillain0d5a2812015-11-13 10:07:31 +00006699 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006700 GenerateReferenceLoadOneRegister(instruction,
6701 temp_loc,
6702 super_offset,
6703 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006704 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006705
6706 // If the class reference currently in `temp` is not null, jump
6707 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006708 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006709 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006710 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006711 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006712 break;
6713 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006714
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006715 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006716 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006717 GenerateReferenceLoadTwoRegisters(instruction,
6718 temp_loc,
6719 obj_loc,
6720 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006721 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006722 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006723 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006724 if (cls.IsRegister()) {
6725 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6726 } else {
6727 DCHECK(cls.IsStackSlot()) << cls;
6728 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6729 }
6730 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006731
6732 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006733 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006734 GenerateReferenceLoadOneRegister(instruction,
6735 temp_loc,
6736 component_offset,
6737 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006738 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006739
6740 // If the component type is not null (i.e. the object is indeed
6741 // an array), jump to label `check_non_primitive_component_type`
6742 // to further check that this component type is not a primitive
6743 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006744 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006745 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006746 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006747 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006748 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006749 break;
6750 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006751
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006752 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006753 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006754 //
6755 // We cannot directly call the CheckCast runtime entry point
6756 // without resorting to a type checking slow path here (i.e. by
6757 // calling InvokeRuntime directly), as it would require to
6758 // assign fixed registers for the inputs of this HInstanceOf
6759 // instruction (following the runtime calling convention), which
6760 // might be cluttered by the potential first read barrier
6761 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006762 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006763 break;
6764 }
6765
Vladimir Marko175e7862018-03-27 09:03:13 +00006766 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006767 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
6768 // We can not get false positives by doing this.
6769 // /* HeapReference<Class> */ temp = obj->klass_
6770 GenerateReferenceLoadTwoRegisters(instruction,
6771 temp_loc,
6772 obj_loc,
6773 class_offset,
6774 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006775
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006776 // /* HeapReference<Class> */ temp = temp->iftable_
6777 GenerateReferenceLoadTwoRegisters(instruction,
6778 temp_loc,
6779 temp_loc,
6780 iftable_offset,
6781 kWithoutReadBarrier);
6782 // Iftable is never null.
6783 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
6784 // Maybe poison the `cls` for direct comparison with memory.
6785 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
6786 // Loop through the iftable and check if any class matches.
6787 NearLabel start_loop;
6788 __ Bind(&start_loop);
6789 // Need to subtract first to handle the empty array case.
6790 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
6791 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6792 // Go to next interface if the classes do not match.
6793 __ cmpl(cls.AsRegister<CpuRegister>(),
6794 CodeGeneratorX86_64::ArrayAddress(temp,
6795 maybe_temp2_loc,
6796 TIMES_4,
6797 object_array_data_offset));
6798 __ j(kNotEqual, &start_loop); // Return if same class.
6799 // If `cls` was poisoned above, unpoison it.
6800 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006801 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006802 }
6803
6804 case TypeCheckKind::kBitstringCheck: {
6805 // /* HeapReference<Class> */ temp = obj->klass_
6806 GenerateReferenceLoadTwoRegisters(instruction,
6807 temp_loc,
6808 obj_loc,
6809 class_offset,
6810 kWithoutReadBarrier);
6811
6812 GenerateBitstringTypeCheckCompare(instruction, temp);
6813 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
6814 break;
6815 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006816 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006817
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006818 if (done.IsLinked()) {
6819 __ Bind(&done);
6820 }
6821
Roland Levillain0d5a2812015-11-13 10:07:31 +00006822 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006823}
6824
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006825void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006826 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6827 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006828 InvokeRuntimeCallingConvention calling_convention;
6829 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6830}
6831
6832void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006833 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006834 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006835 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006836 if (instruction->IsEnter()) {
6837 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6838 } else {
6839 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6840 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006841}
6842
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05306843void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6844 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6845 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6846 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6847 locations->SetInAt(0, Location::RequiresRegister());
6848 // There is no immediate variant of negated bitwise and in X86.
6849 locations->SetInAt(1, Location::RequiresRegister());
6850 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6851}
6852
6853void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6854 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6855 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6856 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6857 locations->SetInAt(0, Location::RequiresRegister());
6858 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6859}
6860
6861void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6862 LocationSummary* locations = instruction->GetLocations();
6863 Location first = locations->InAt(0);
6864 Location second = locations->InAt(1);
6865 Location dest = locations->Out();
6866 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
6867}
6868
6869void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6870 LocationSummary* locations = instruction->GetLocations();
6871 Location src = locations->InAt(0);
6872 Location dest = locations->Out();
6873 switch (instruction->GetOpKind()) {
6874 case HInstruction::kAnd:
6875 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6876 break;
6877 case HInstruction::kXor:
6878 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6879 break;
6880 default:
6881 LOG(FATAL) << "Unreachable";
6882 }
6883}
6884
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006885void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6886void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6887void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6888
6889void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6890 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006891 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006892 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6893 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006894 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006895 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006896 locations->SetOut(Location::SameAsFirstInput());
6897}
6898
// Code generation for HAnd is shared with HOr/HXor in HandleBitwiseOperation.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}
6902
// Code generation for HOr is shared with HAnd/HXor in HandleBitwiseOperation.
void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
6906
// Code generation for HXor is shared with HAnd/HOr in HandleBitwiseOperation.
void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6910
// Emit code for an integral HAnd/HOr/HXor. The locations were set up so that
// the first input is also the output (SameAsFirstInput), which matches the
// destructive two-operand x86 instruction forms emitted below.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // 32-bit case: the second operand may be a register, an int32 immediate,
    // or a stack slot; each maps directly onto an andl/orl/xorl variant.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // The second operand lives on the stack; use the memory-operand form.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // 64-bit immediates can only be encoded when they fit in a sign-extended
    // imm32; otherwise the constant is read from an in-memory 64-bit literal.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
6999
// Load the heap reference at `*(out + offset)` into `out` itself (the source
// register is clobbered by the result), honoring `read_barrier_option`.
// `maybe_temp` must hold a register when the slow-path (non-Baker) read
// barrier is used: the pre-load value of `out` is saved there so the read
// barrier code still knows which object was dereferenced.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, out_reg, offset, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ movl(out_reg, Address(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ movl(out_reg, Address(out_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
7032
// Load the heap reference at `*(obj + offset)` into the distinct register
// `out` (the source object register `obj` is left untouched), honoring
// `read_barrier_option`. No extra temp is needed since the source survives.
void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  CpuRegister out_reg = out.AsRegister<CpuRegister>();
  CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          instruction, out, obj_reg, offset, /* needs_null_check= */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ movl(out_reg, Address(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ movl(out_reg, Address(obj_reg, offset));
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
7061
// Load the GC root stored at `address` into the `root` register, honoring
// `read_barrier_option`. When `fixup_label` is non-null it is bound right
// after the load/lea instruction (NOTE(review): presumably so a linker patch
// can target that instruction — confirm against the callers that pass one).
void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    const Address& address,
    Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  CpuRegister root_reg = root.AsRegister<CpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *address
      __ movl(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // The plain 32-bit load above is only correct if a compressed root has
      // the same size and layout as a GC root slot.
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
          instruction, root, /* unpoison_ref_before_marking= */ false);
      codegen_->AddSlowPath(slow_path);

      // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
      const int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
      __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
      // The entrypoint is null when the GC is not marking.
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = address
      __ leaq(root_reg, address);
      if (fixup_label != nullptr) {
        __ Bind(fixup_label);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *address
    __ movl(root_reg, address);
    if (fixup_label != nullptr) {
      __ Bind(fixup_label);
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
7128
7129void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7130 Location ref,
7131 CpuRegister obj,
7132 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007133 bool needs_null_check) {
7134 DCHECK(kEmitCompilerReadBarrier);
7135 DCHECK(kUseBakerReadBarrier);
7136
7137 // /* HeapReference<Object> */ ref = *(obj + offset)
7138 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007139 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007140}
7141
7142void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7143 Location ref,
7144 CpuRegister obj,
7145 uint32_t data_offset,
7146 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007147 bool needs_null_check) {
7148 DCHECK(kEmitCompilerReadBarrier);
7149 DCHECK(kUseBakerReadBarrier);
7150
Roland Levillain3d312422016-06-23 13:53:42 +01007151 static_assert(
7152 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7153 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007154 // /* HeapReference<Object> */ ref =
7155 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007156 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007157 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007158}
7159
7160void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7161 Location ref,
7162 CpuRegister obj,
7163 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007164 bool needs_null_check,
7165 bool always_update_field,
7166 CpuRegister* temp1,
7167 CpuRegister* temp2) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007168 DCHECK(kEmitCompilerReadBarrier);
7169 DCHECK(kUseBakerReadBarrier);
7170
7171 // In slow path based read barriers, the read barrier call is
7172 // inserted after the original load. However, in fast path based
7173 // Baker's read barriers, we need to perform the load of
7174 // mirror::Object::monitor_ *before* the original reference load.
7175 // This load-load ordering is required by the read barrier.
7176 // The fast path/slow path (for Baker's algorithm) should look like:
7177 //
7178 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7179 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7180 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007181 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007182 // if (is_gray) {
7183 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7184 // }
7185 //
7186 // Note: the original implementation in ReadBarrier::Barrier is
7187 // slightly more complex as:
7188 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007189 // the high-bits of rb_state, which are expected to be all zeroes
7190 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
7191 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007192 // - it performs additional checks that we do not do here for
7193 // performance reasons.
7194
7195 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007196 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7197
Vladimir Marko953437b2016-08-24 08:30:46 +00007198 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007199 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007200 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007201 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7202 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7203 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7204
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007205 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007206 // ref = ReadBarrier::Mark(ref);
7207 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7208 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007209 if (needs_null_check) {
7210 MaybeRecordImplicitNullCheck(instruction);
7211 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007212
7213 // Load fence to prevent load-load reordering.
7214 // Note that this is a no-op, thanks to the x86-64 memory model.
7215 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7216
7217 // The actual reference load.
7218 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007219 __ movl(ref_reg, src); // Flags are unaffected.
7220
7221 // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
7222 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007223 SlowPathCode* slow_path;
7224 if (always_update_field) {
7225 DCHECK(temp1 != nullptr);
7226 DCHECK(temp2 != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01007227 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007228 instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007229 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007230 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007231 instruction, ref, /* unpoison_ref_before_marking= */ true);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007232 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007233 AddSlowPath(slow_path);
7234
7235 // We have done the "if" of the gray bit check above, now branch based on the flags.
7236 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007237
7238 // Object* ref = ref_addr->AsMirrorPtr()
7239 __ MaybeUnpoisonHeapReference(ref_reg);
7240
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007241 __ Bind(slow_path->GetExitLabel());
7242}
7243
7244void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7245 Location out,
7246 Location ref,
7247 Location obj,
7248 uint32_t offset,
7249 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007250 DCHECK(kEmitCompilerReadBarrier);
7251
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007252 // Insert a slow path based read barrier *after* the reference load.
7253 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007254 // If heap poisoning is enabled, the unpoisoning of the loaded
7255 // reference will be carried out by the runtime within the slow
7256 // path.
7257 //
7258 // Note that `ref` currently does not get unpoisoned (when heap
7259 // poisoning is enabled), which is alright as the `ref` argument is
7260 // not used by the artReadBarrierSlow entry point.
7261 //
7262 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007263 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007264 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7265 AddSlowPath(slow_path);
7266
Roland Levillain0d5a2812015-11-13 10:07:31 +00007267 __ jmp(slow_path->GetEntryLabel());
7268 __ Bind(slow_path->GetExitLabel());
7269}
7270
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007271void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7272 Location out,
7273 Location ref,
7274 Location obj,
7275 uint32_t offset,
7276 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007277 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007278 // Baker's read barriers shall be handled by the fast path
7279 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7280 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007281 // If heap poisoning is enabled, unpoisoning will be taken care of
7282 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007283 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007284 } else if (kPoisonHeapReferences) {
7285 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7286 }
7287}
7288
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007289void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7290 Location out,
7291 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007292 DCHECK(kEmitCompilerReadBarrier);
7293
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007294 // Insert a slow path based read barrier *after* the GC root load.
7295 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007296 // Note that GC roots are not affected by heap poisoning, so we do
7297 // not need to do anything special for this here.
7298 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007299 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007300 AddSlowPath(slow_path);
7301
Roland Levillain0d5a2812015-11-13 10:07:31 +00007302 __ jmp(slow_path->GetEntryLabel());
7303 __ Bind(slow_path->GetExitLabel());
7304}
7305
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007306void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007307 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007308 LOG(FATAL) << "Unreachable";
7309}
7310
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007311void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007312 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007313 LOG(FATAL) << "Unreachable";
7314}
7315
Mark Mendellfe57faa2015-09-18 09:26:15 -04007316// Simple implementation of packed switch - generate cascaded compare/jumps.
7317void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7318 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007319 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007320 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007321 locations->AddTemp(Location::RequiresRegister());
7322 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007323}
7324
// Emits code for a packed switch: either a chain of inline compare/jump pairs
// (for few cases) or a jump table stored in the constant area (for many).
void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Should we generate smaller inline compare/jumps?
  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    // Figure out the correct compare values and jump conditions.
    // Handle the first compare/branch as a special case because it might
    // jump to the default case.
    DCHECK_GT(num_entries, 2u);
    Condition first_condition;
    uint32_t index;
    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    if (lower_bound != 0) {
      // Biased switch: dispatch below-range values to the default block first,
      // then handle the first case; remaining comparisons use signed kLess.
      first_condition = kLess;
      __ cmpl(value_reg_in, Immediate(lower_bound));
      __ j(first_condition, codegen_->GetLabelOf(default_block));
      __ j(kEqual, codegen_->GetLabelOf(successors[0]));

      index = 1;
    } else {
      // Handle all the compare/jumps below. With a zero lower bound, the
      // unsigned kBelow condition is used instead.
      first_condition = kBelow;
      index = 0;
    }

    // Handle the rest of the compare/jumps, two cases per comparison.
    for (; index + 1 < num_entries; index += 2) {
      int32_t compare_to_value = lower_bound + index + 1;
      __ cmpl(value_reg_in, Immediate(compare_to_value));
      // Jump to successors[index] if value < case_value[index].
      __ j(first_condition, codegen_->GetLabelOf(successors[index]));
      // Jump to successors[index + 1] if value == case_value[index + 1].
      __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
    }

    if (index != num_entries) {
      // There are an odd number of entries. Handle the last one.
      DCHECK_EQ(index + 1, num_entries);
      __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
      __ j(kEqual, codegen_->GetLabelOf(successors[index]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ jmp(codegen_->GetLabelOf(default_block));
    }
    return;
  }

  // Jump-table path.
  // Remove the bias, if needed, so the value can index the table from zero.
  Register value_reg_out = value_reg_in.AsRegister();
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg_in, -lower_bound));
    value_reg_out = temp_reg.AsRegister();
  }
  CpuRegister value_reg(value_reg_out);

  // Is the value in range? The unsigned kAbove also catches negative values.
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the address of the jump table in the constant area.
  __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));

  // Load the (signed) offset from the jump table.
  __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));

  // Add the offset to the address of the table base.
  __ addq(temp_reg, base_reg);

  // And jump.
  __ jmp(temp_reg);
}
7405
xueliang.zhonge0eb4832017-10-30 13:43:14 +00007406void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7407 ATTRIBUTE_UNUSED) {
7408 LOG(FATAL) << "Unreachable";
7409}
7410
7411void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7412 ATTRIBUTE_UNUSED) {
7413 LOG(FATAL) << "Unreachable";
7414}
7415
Aart Bikc5d47542016-01-27 17:00:35 -08007416void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7417 if (value == 0) {
7418 __ xorl(dest, dest);
7419 } else {
7420 __ movl(dest, Immediate(value));
7421 }
7422}
7423
Mark Mendell92e83bf2015-05-07 11:25:03 -04007424void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7425 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007426 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007427 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007428 } else if (IsUint<32>(value)) {
7429 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007430 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7431 } else {
7432 __ movq(dest, Immediate(value));
7433 }
7434}
7435
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007436void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7437 if (value == 0) {
7438 __ xorps(dest, dest);
7439 } else {
7440 __ movss(dest, LiteralInt32Address(value));
7441 }
7442}
7443
7444void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7445 if (value == 0) {
7446 __ xorpd(dest, dest);
7447 } else {
7448 __ movsd(dest, LiteralInt64Address(value));
7449 }
7450}
7451
// Loads a float constant by reusing the integer path on its raw bit pattern.
void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
  Load32BitValue(dest, bit_cast<int32_t, float>(value));
}
7455
// Loads a double constant by reusing the integer path on its raw bit pattern.
void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
  Load64BitValue(dest, bit_cast<int64_t, double>(value));
}
7459
Aart Bika19616e2016-02-01 18:57:58 -08007460void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7461 if (value == 0) {
7462 __ testl(dest, dest);
7463 } else {
7464 __ cmpl(dest, Immediate(value));
7465 }
7466}
7467
7468void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7469 if (IsInt<32>(value)) {
7470 if (value == 0) {
7471 __ testq(dest, dest);
7472 } else {
7473 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7474 }
7475 } else {
7476 // Value won't fit in an int.
7477 __ cmpq(dest, LiteralInt64Address(value));
7478 }
7479}
7480
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007481void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7482 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007483 GenerateIntCompare(lhs_reg, rhs);
7484}
7485
7486void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007487 if (rhs.IsConstant()) {
7488 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007489 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007490 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007491 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007492 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007493 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007494 }
7495}
7496
7497void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7498 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7499 if (rhs.IsConstant()) {
7500 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7501 Compare64BitValue(lhs_reg, value);
7502 } else if (rhs.IsDoubleStackSlot()) {
7503 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7504 } else {
7505 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7506 }
7507}
7508
7509Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7510 Location index,
7511 ScaleFactor scale,
7512 uint32_t data_offset) {
7513 return index.IsConstant() ?
7514 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7515 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7516}
7517
Mark Mendellcfa410b2015-05-25 16:02:44 -04007518void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7519 DCHECK(dest.IsDoubleStackSlot());
7520 if (IsInt<32>(value)) {
7521 // Can move directly as an int32 constant.
7522 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7523 Immediate(static_cast<int32_t>(value)));
7524 } else {
7525 Load64BitValue(CpuRegister(TMP), value);
7526 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7527 }
7528}
7529
/**
 * Class to handle late fixup of offsets into constant area.
 *
 * The 32-bit displacement of a RIP-relative operand is emitted as a
 * placeholder and patched in Process() once the constant area's final
 * position is known.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  // `offset` is the position of the referenced literal within the constant area.
  RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  // Lets subclasses (e.g. JumpTableRIPFixup) set the offset once the
  // constant-area position of their data is known.
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86_64* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) override {
    // Patch the correct offset for the instruction. We use the address of the
    // 'next' instruction, which is 'pos' (patch the 4 bytes before).
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - pos;

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  size_t offset_into_constant_area_;
};
7557
/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  // The constant-area offset is unknown at construction; it is initialized to
  // -1 and set later by CreateJumpTable().
  JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
      : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}

  // Appends the jump table for `switch_instr_` to the assembler's constant
  // area and records its offset for the RIP-relative fixup. Entries are
  // 32-bit offsets of the successor blocks relative to the table's position.
  void CreateJumpTable() {
    X86_64Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // Compute the offset from the start of the function to this jump table.
    const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;

    // Populate the jump table with the correct values for the jump table.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // The value that we want is the target offset - the position of the table.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      // All case targets must already have been emitted at this point.
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - current_table_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HPackedSwitch* switch_instr_;
};
7594
// Emits the out-of-line constant area (literals and jump tables) after the
// method code, then performs the base-class finalization.
void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86_64Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables. This must happen before AddConstantArea so
    // the tables land inside the constant area.
    for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}
7615
Mark Mendellf55c3e02015-03-26 21:07:46 -04007616Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007617 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007618 return Address::RIP(fixup);
7619}
7620
7621Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007622 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007623 return Address::RIP(fixup);
7624}
7625
7626Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007627 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007628 return Address::RIP(fixup);
7629}
7630
7631Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007632 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007633 return Address::RIP(fixup);
7634}
7635
Andreas Gampe85b62f22015-09-09 13:15:38 -07007636// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007637void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007638 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007639 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007640 return;
7641 }
7642
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007643 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007644
7645 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7646 if (trg.Equals(return_loc)) {
7647 return;
7648 }
7649
7650 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007651 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007652 parallel_move.AddMove(return_loc, trg, type, nullptr);
7653 GetMoveResolver()->EmitNativeCode(&parallel_move);
7654}
7655
Mark Mendell9c86b482015-09-18 13:36:07 -04007656Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7657 // Create a fixup to be used to create and address the jump table.
7658 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007659 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007660
7661 // We have to populate the jump tables.
7662 fixups_to_jump_tables_.push_back(table_fixup);
7663 return Address::RIP(table_fixup);
7664}
7665
// Stores the 64-bit immediate `v` to memory at `addr_low`/`addr_high`,
// recording an implicit null check after the first (possibly faulting) store.
void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
                                             const Address& addr_high,
                                             int64_t v,
                                             HInstruction* instruction) {
  if (IsInt<32>(v)) {
    int32_t v_32 = v;
    // movq sign-extends a 32-bit immediate, so a single store suffices.
    __ movq(addr_low, Immediate(v_32));
    MaybeRecordImplicitNullCheck(instruction);
  } else {
    // The value cannot be encoded as a single sign-extended 32-bit immediate;
    // store the two 32-bit halves separately.
    int32_t low_v = Low32Bits(v);
    int32_t high_v = High32Bits(v);
    __ movl(addr_low, Immediate(low_v));
    MaybeRecordImplicitNullCheck(instruction);
    __ movl(addr_high, Immediate(high_v));
  }
}
7683
// Patches one JIT GC-root access in `code`: writes the absolute address of
// root-table entry `index_in_table` (within `roots_data`) into the 32-bit
// literal associated with `info.label`.
void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const PatchInfo<Label>& info,
                                          uint64_t index_in_table) const {
  // Adjust the bound label position back to the start of the 4-byte literal.
  uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
  // Address of the root slot in the JIT root table.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The patch site need not be 4-byte aligned, so write through an unaligned type.
  using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
  reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
      dchecked_integral_cast<uint32_t>(address);
}
7695
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007696void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7697 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007698 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007699 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007700 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007701 }
7702
7703 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007704 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007705 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007706 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007707 }
7708}
7709
// Returns true when the target instruction set features include AVX.
bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
7713
// Returns true when the target instruction set features include AVX2.
bool LocationsBuilderX86_64::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
7717
// Returns true when the target instruction set features include AVX.
bool InstructionCodeGeneratorX86_64::CpuHasAvxFeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX();
}
7721
// Returns true when the target instruction set features include AVX2.
bool InstructionCodeGeneratorX86_64::CpuHasAvx2FeatureFlag() {
  return codegen_->GetInstructionSetFeatures().HasAVX2();
}
7725
Roland Levillain4d027112015-07-01 15:41:14 +01007726#undef __
7727
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007728} // namespace x86_64
7729} // namespace art