blob: 117277697db324e320cb854cc4e76546f216473a [file] [log] [blame]
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86_64.h"
18
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000019#include "art_method-inl.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010020#include "class_table.h"
Guillaume Sanchez0f88e872015-03-30 17:55:45 +010021#include "code_generator_utils.h"
Vladimir Marko58155012015-08-19 12:49:41 +000022#include "compiled_method.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010023#include "entrypoints/quick/quick_entrypoints.h"
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +010024#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010025#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070026#include "heap_poisoning.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080027#include "intrinsics.h"
28#include "intrinsics_x86_64.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000029#include "jit/profiling_info.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Andreas Gamped4901292017-05-30 18:41:34 -070031#include "lock_word.h"
Ian Rogers7e70b002014-10-08 11:47:24 -070032#include "mirror/array-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070033#include "mirror/class-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010034#include "mirror/object_reference.h"
Nicolas Geoffraye2a3aa92019-11-25 17:52:58 +000035#include "scoped_thread_state_change-inl.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010036#include "thread.h"
37#include "utils/assembler.h"
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +010038#include "utils/stack_checks.h"
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010039#include "utils/x86_64/assembler_x86_64.h"
40#include "utils/x86_64/managed_register_x86_64.h"
41
Vladimir Marko0a516052019-10-14 13:00:44 +000042namespace art {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010043
Roland Levillain0d5a2812015-11-13 10:07:31 +000044template<class MirrorType>
45class GcRoot;
46
Nicolas Geoffray9cf35522014-06-09 18:40:10 +010047namespace x86_64 {
48
// The ArtMethod* of the current method is always spilled at the bottom of the
// managed frame, i.e. at offset 0 from the stack pointer.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (first argument register, RDI).
static constexpr Register kMethodRegisterArgument = RDI;
// The compare/jump sequence will generate about (1.5 * num_entries) instructions. A jump
// table version generates 7 instructions and num_entries literals. Compare/jump sequence
// generates less code/data with a small num_entries.
static constexpr uint32_t kPackedSwitchJumpTableThreshold = 5;

// Core and floating-point registers preserved across calls in the managed ABI.
static constexpr Register kCoreCalleeSaves[] = { RBX, RBP, R12, R13, R14, R15 };
static constexpr FloatRegister kFpuCalleeSaves[] = { XMM12, XMM13, XMM14, XMM15 };

// Mask for the x87 C2 condition flag (bit 10 of the FPU status word).
static constexpr int kC2ConditionMask = 0x400;
60
Vladimir Marko3232dbb2018-07-25 15:42:46 +010061static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
62 // Custom calling convention: RAX serves as both input and output.
63 RegisterSet caller_saves = RegisterSet::Empty();
64 caller_saves.Add(Location::RegisterLocation(RAX));
65 return caller_saves;
66}
67
Roland Levillain7cbd27f2016-08-11 23:53:33 +010068// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
69#define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -070070#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
Nicolas Geoffraye5038322014-07-04 09:41:32 +010071
Andreas Gampe85b62f22015-09-09 13:15:38 -070072class NullCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffraye5038322014-07-04 09:41:32 +010073 public:
David Srbecky9cd6d372016-02-09 15:24:47 +000074 explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye5038322014-07-04 09:41:32 +010075
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010076 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +000077 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffraye5038322014-07-04 09:41:32 +010078 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +000079 if (instruction_->CanThrowIntoCatchBlock()) {
80 // Live registers will be restored in the catch block if caught.
81 SaveLiveRegisters(codegen, instruction_->GetLocations());
82 }
Serban Constantinescuba45db02016-07-12 22:53:02 +010083 x86_64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Roland Levillain0d5a2812015-11-13 10:07:31 +000084 instruction_,
85 instruction_->GetDexPc(),
86 this);
Roland Levillain888d0672015-11-23 18:53:50 +000087 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Nicolas Geoffraye5038322014-07-04 09:41:32 +010088 }
89
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010090 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +010091
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010092 const char* GetDescription() const override { return "NullCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +010093
Nicolas Geoffraye5038322014-07-04 09:41:32 +010094 private:
Nicolas Geoffraye5038322014-07-04 09:41:32 +010095 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
96};
97
Andreas Gampe85b62f22015-09-09 13:15:38 -070098class DivZeroCheckSlowPathX86_64 : public SlowPathCode {
Calin Juravled0d48522014-11-04 16:40:20 +000099 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000100 explicit DivZeroCheckSlowPathX86_64(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}
Calin Juravled0d48522014-11-04 16:40:20 +0000101
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100102 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000103 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Calin Juravled0d48522014-11-04 16:40:20 +0000104 __ Bind(GetEntryLabel());
Serban Constantinescuba45db02016-07-12 22:53:02 +0100105 x86_64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000106 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Calin Juravled0d48522014-11-04 16:40:20 +0000107 }
108
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100109 bool IsFatal() const override { return true; }
Alexandre Rames8158f282015-08-07 10:26:17 +0100110
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100111 const char* GetDescription() const override { return "DivZeroCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100112
Calin Juravled0d48522014-11-04 16:40:20 +0000113 private:
Calin Juravled0d48522014-11-04 16:40:20 +0000114 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86_64);
115};
116
// Slow path for HDiv/HRem with a divisor of -1: computes the result directly
// instead of executing the hardware divide, which faults on the overflowing
// case (most-negative value divided by -1) on x86-64.
class DivRemMinusOneSlowPathX86_64 : public SlowPathCode {
 public:
  // `reg` holds the dividend on entry and receives the result.
  DivRemMinusOneSlowPathX86_64(HInstruction* at, Register reg, DataType::Type type, bool is_div)
      : SlowPathCode(at), cpu_reg_(CpuRegister(reg)), type_(type), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    __ Bind(GetEntryLabel());
    if (type_ == DataType::Type::kInt32) {
      if (is_div_) {
        // x / -1 == -x.
        __ negl(cpu_reg_);
      } else {
        // x % -1 == 0.
        __ xorl(cpu_reg_, cpu_reg_);
      }

    } else {
      DCHECK_EQ(DataType::Type::kInt64, type_);
      if (is_div_) {
        __ negq(cpu_reg_);
      } else {
        // NOTE: xorl (not xorq) is intentional: a 32-bit xor zero-extends into
        // the full 64-bit register and has a shorter encoding.
        __ xorl(cpu_reg_, cpu_reg_);
      }
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "DivRemMinusOneSlowPathX86_64"; }

 private:
  const CpuRegister cpu_reg_;   // Register holding the dividend and the result.
  const DataType::Type type_;   // kInt32 or kInt64.
  const bool is_div_;           // True for div, false for rem.
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86_64);
};
150
Andreas Gampe85b62f22015-09-09 13:15:38 -0700151class SuspendCheckSlowPathX86_64 : public SlowPathCode {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000152 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100153 SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000154 : SlowPathCode(instruction), successor_(successor) {}
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000155
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100156 void EmitNativeCode(CodeGenerator* codegen) override {
Aart Bikb13c65b2017-03-21 20:14:07 -0700157 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +0000158 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000159 __ Bind(GetEntryLabel());
Aart Bik24b905f2017-04-06 09:59:06 -0700160 SaveLiveRegisters(codegen, locations); // Only saves full width XMM for SIMD.
Serban Constantinescuba45db02016-07-12 22:53:02 +0100161 x86_64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000162 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Aart Bik24b905f2017-04-06 09:59:06 -0700163 RestoreLiveRegisters(codegen, locations); // Only restores full width XMM for SIMD.
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100164 if (successor_ == nullptr) {
165 __ jmp(GetReturnLabel());
166 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000167 __ jmp(x86_64_codegen->GetLabelOf(successor_));
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100168 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000169 }
170
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100171 Label* GetReturnLabel() {
172 DCHECK(successor_ == nullptr);
173 return &return_label_;
174 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000175
Nicolas Geoffraydb216f42015-05-05 17:02:20 +0100176 HBasicBlock* GetSuccessor() const {
177 return successor_;
178 }
179
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100180 const char* GetDescription() const override { return "SuspendCheckSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100181
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000182 private:
Nicolas Geoffray3c049742014-09-24 18:10:46 +0100183 HBasicBlock* const successor_;
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +0000184 Label return_label_;
185
186 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
187};
188
// Slow path for HBoundsCheck: calls the runtime to throw an out-of-bounds
// exception, passing the offending index and the length as arguments.
class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86_64(HBoundsCheck* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // Are we using an array length from memory?  If the HArrayLength was merged
    // into this use site, it has no location of its own and must be reloaded here.
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      HArrayLength* length = array_length->AsArrayLength();
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(length);
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<CpuRegister>(), array_len);
      if (mirror::kUseStringCompression && length->IsStringLength()) {
        // The String count field also encodes the compression flag; shift it
        // out to recover the character count.
        __ shrl(length_loc.AsRegister<CpuRegister>(), Immediate(1));
      }
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kInt32,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32);
    // String.charAt bounds failures throw StringIndexOutOfBoundsException,
    // array accesses throw ArrayIndexOutOfBoundsException.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The runtime call throws, so this path never returns to the fast path.
  bool IsFatal() const override { return true; }

  const char* GetDescription() const override { return "BoundsCheckSlowPathX86_64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
248
// Slow path for HLoadClass/HClinitCheck: resolves the type and/or runs class
// initialization by calling into the runtime, then moves the class to its
// output location.
class LoadClassSlowPathX86_64 : public SlowPathCode {
 public:
  // `at` is the instruction owning this slow path: either the HLoadClass
  // itself, or an HClinitCheck operating on an already-loaded class.
  LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
      : SlowPathCode(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Custom calling convention: RAX serves as both input and output.
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), x86_64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ movl(CpuRegister(RAX), Immediate(type_index.index_));
      x86_64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // The class is already resolved; copy it into RAX for the clinit call.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      x86_64_codegen->Move(Location::RegisterLocation(RAX), source);
    }
    if (must_do_clinit) {
      x86_64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_64_codegen->Move(out, Location::RegisterLocation(RAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "LoadClassSlowPathX86_64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64);
};
304
Vladimir Markoaad75c62016-10-03 08:46:48 +0000305class LoadStringSlowPathX86_64 : public SlowPathCode {
306 public:
307 explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {}
308
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100309 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Markoaad75c62016-10-03 08:46:48 +0000310 LocationSummary* locations = instruction_->GetLocations();
311 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
312
313 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
314 __ Bind(GetEntryLabel());
315 SaveLiveRegisters(codegen, locations);
316
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000317 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Vladimir Marko94ce9c22016-09-30 14:50:51 +0100318 // Custom calling convention: RAX serves as both input and output.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000319 __ movl(CpuRegister(RAX), Immediate(string_index.index_));
Vladimir Markoaad75c62016-10-03 08:46:48 +0000320 x86_64_codegen->InvokeRuntime(kQuickResolveString,
321 instruction_,
322 instruction_->GetDexPc(),
323 this);
324 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
325 x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
326 RestoreLiveRegisters(codegen, locations);
327
Vladimir Markoaad75c62016-10-03 08:46:48 +0000328 __ jmp(GetExitLabel());
329 }
330
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100331 const char* GetDescription() const override { return "LoadStringSlowPathX86_64"; }
Vladimir Markoaad75c62016-10-03 08:46:48 +0000332
333 private:
334 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
335};
336
// Slow path for HInstanceOf/HCheckCast: calls into the runtime for the
// non-trivial type check. For instance-of the result is moved to the output;
// for a failing check-cast the runtime throws.
class TypeCheckSlowPathX86_64 : public SlowPathCode {
 public:
  // `is_fatal` is true when the runtime call cannot return to the fast path
  // (check-cast that is guaranteed to throw on this path).
  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());

    if (kPoisonHeapReferences &&
        instruction_->IsCheckCast() &&
        instruction_->AsCheckCast()->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck) {
      // First, unpoison the `cls` reference that was poisoned for direct memory comparison.
      __ UnpoisonHeapReference(locations->InAt(1).AsRegister<CpuRegister>());
    }

    // A fatal path never returns, so saving is only needed if the exception
    // can be caught in this method.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      x86_64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        // The runtime returned the result in RAX; move it to the output.
        x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
      }

      RestoreLiveRegisters(codegen, locations);
      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const override { return "TypeCheckSlowPathX86_64"; }

  bool IsFatal() const override { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
};
399
Andreas Gampe85b62f22015-09-09 13:15:38 -0700400class DeoptimizationSlowPathX86_64 : public SlowPathCode {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700401 public:
Aart Bik42249c32016-01-07 15:33:50 -0800402 explicit DeoptimizationSlowPathX86_64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000403 : SlowPathCode(instruction) {}
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700404
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100405 void EmitNativeCode(CodeGenerator* codegen) override {
Roland Levillain0d5a2812015-11-13 10:07:31 +0000406 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700407 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100408 LocationSummary* locations = instruction_->GetLocations();
409 SaveLiveRegisters(codegen, locations);
410 InvokeRuntimeCallingConvention calling_convention;
411 x86_64_codegen->Load32BitValue(
412 CpuRegister(calling_convention.GetRegisterAt(0)),
413 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescuba45db02016-07-12 22:53:02 +0100414 x86_64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100415 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700416 }
417
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100418 const char* GetDescription() const override { return "DeoptimizationSlowPathX86_64"; }
Alexandre Rames9931f312015-06-19 14:47:01 +0100419
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700420 private:
Mingyao Yangd43b3ac2015-04-01 14:03:04 -0700421 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86_64);
422};
423
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100424class ArraySetSlowPathX86_64 : public SlowPathCode {
425 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000426 explicit ArraySetSlowPathX86_64(HInstruction* instruction) : SlowPathCode(instruction) {}
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100427
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100428 void EmitNativeCode(CodeGenerator* codegen) override {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100429 LocationSummary* locations = instruction_->GetLocations();
430 __ Bind(GetEntryLabel());
431 SaveLiveRegisters(codegen, locations);
432
433 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100434 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100435 parallel_move.AddMove(
436 locations->InAt(0),
437 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100438 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100439 nullptr);
440 parallel_move.AddMove(
441 locations->InAt(1),
442 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100443 DataType::Type::kInt32,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100444 nullptr);
445 parallel_move.AddMove(
446 locations->InAt(2),
447 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100448 DataType::Type::kReference,
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100449 nullptr);
450 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
451
Roland Levillain0d5a2812015-11-13 10:07:31 +0000452 CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
Serban Constantinescuba45db02016-07-12 22:53:02 +0100453 x86_64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000454 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100455 RestoreLiveRegisters(codegen, locations);
456 __ jmp(GetExitLabel());
457 }
458
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100459 const char* GetDescription() const override { return "ArraySetSlowPathX86_64"; }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100460
461 private:
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +0100462 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86_64);
463};
464
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86_64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` must be a core register location (see the AsRegister<CpuRegister>()
  // call in EmitNativeCode). If `unpoison_ref_before_marking` is true, the
  // reference is unpoisoned before being passed to the mark entrypoint.
  ReadBarrierMarkSlowPathX86_64(HInstruction* instruction,
                                Location ref,
                                bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override { return "ReadBarrierMarkSlowPathX86_64"; }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only a restricted set of instructions is expected to use this slow path;
    // catch unexpected users early in debug builds.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI and output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // The entrypoint is selected by `ref_reg`, one per core register.
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86_64);
};
548
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86_64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86_64 : public SlowPathCode {
 public:
  // `ref` must be a core register location; `field_addr` is the address of the
  // field to be CAS-updated and its base must be `obj`. `temp1` and `temp2`
  // are scratch registers used to hold the old reference and to save RAX.
  ReadBarrierMarkAndUpdateFieldSlowPathX86_64(HInstruction* instruction,
                                              Location ref,
                                              CpuRegister obj,
                                              const Address& field_addr,
                                              bool unpoison_ref_before_marking,
                                              CpuRegister temp1,
                                              CpuRegister temp2)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp1_(temp1),
        temp2_(temp2) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const override {
    return "ReadBarrierMarkAndUpdateFieldSlowPathX86_64";
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister ref_cpu_reg = ref_.AsRegister<CpuRegister>();
    Register ref_reg = ref_cpu_reg.AsRegister();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_cpu_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp1_, ref_cpu_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    DCHECK_NE(ref_reg, RSP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // in RDI and output in RAX):
    //
    //   RDI <- ref
    //   RAX <- ReadBarrierMark(RDI)
    //   ref <- RAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp1_, ref_cpu_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but that's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save RAX beforehand, and move the
    // expected value (stored in `temp1_`) into EAX.
    __ movq(temp2_, CpuRegister(RAX));
    __ movl(CpuRegister(RAX), temp1_);

    // Convenience aliases.
    CpuRegister base = obj_;
    CpuRegister expected = CpuRegister(RAX);
    CpuRegister value = ref_cpu_reg;

    bool base_equals_value = (base.AsRegister() == value.AsRegister());
    Register value_reg = ref_reg;
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value_reg` to a temporary register. This way, poisoning
        // `value_reg` won't invalidate `base`.
        value_reg = temp1_.AsRegister();
        __ movl(CpuRegister(value_reg), base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (RAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value_reg, expected.AsRegister());
      DCHECK_NE(base.AsRegister(), expected.AsRegister());

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(CpuRegister(value_reg));
    }

    __ LockCmpxchgl(field_addr_, CpuRegister(value_reg));

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value_reg` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(CpuRegister(value_reg));
      }
      // No need to unpoison `expected` (RAX), as it will be overwritten below.
    }

    // Restore RAX.
    __ movq(CpuRegister(RAX), temp2_);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const CpuRegister obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  // Scratch register holding the old reference across the mark call.
  const CpuRegister temp1_;
  // Scratch register used to save/restore RAX around the CAS.
  const CpuRegister temp2_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86_64);
};
720
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the result of the runtime read barrier; `ref` is the
  // reference to process; `obj` holds the reference; `offset`/`index`
  // describe where the reference lives inside `obj` (see `index_` below).
  ReadBarrierForHeapReferenceSlowPathX86_64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial
    // object has been overwritten by (or after) the heap object
    // reference load to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
}

  void EmitNativeCode(CodeGenerator* codegen) override {
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    CpuRegister reg_out = out_.AsRegister<CpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out.AsRegister())) << out_;
    // Only a restricted set of instructions is expected to use this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute real offset and store it in index_.
        Register index_reg = index_.AsRegister<CpuRegister>().AsRegister();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86_64::X86_64Assembler::shll and
          // art::x86_64::X86_64Assembler::AddImmediate below), but it
          // has not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen).AsRegister();
          __ movl(CpuRegister(free_reg), CpuRegister(index_reg));
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the
        // scale factor (2) cannot overflow in practice, as the
        // runtime is unable to allocate object arrays with a size
        // larger than 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(CpuRegister(index_reg), Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(CpuRegister(index_reg), Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      // No index: pass the constant `offset_` directly as the third argument.
      __ movl(CpuRegister(calling_convention.GetRegisterAt(2)), Immediate(offset_));
    }
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    // The runtime returns the result in RAX; move it to `out_`.
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override {
    return "ReadBarrierForHeapReferenceSlowPathX86_64";
  }

 private:
  // Returns a caller-save core register that is different from the
  // registers holding `ref_` and `obj_`, to be used as scratch space.
  CpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<CpuRegister>().AsRegister());
    size_t obj = static_cast<int>(obj_.AsRegister<CpuRegister>().AsRegister());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<CpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86-64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // Destination of the read barrier result.
  const Location out_;
  // The reference being processed.
  const Location ref_;
  // The object holding the reference.
  const Location obj_;
  // Byte offset of the reference within `obj_` (0 when `index_` is used
  // as a field offset by the Unsafe intrinsics).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86_64);
};
902
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86_64 : public SlowPathCode {
 public:
  // `out` receives the result of the runtime call; `root` is the GC root
  // to process.
  ReadBarrierForRootSlowPathX86_64(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) override {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    // Only HLoadClass and HLoadString are expected to use this slow path.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Move the root into the first runtime-call argument register,
    // call the runtime, and move the result (returned in RAX) to `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
    x86_64_codegen->Move(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_64_codegen->Move(out_, Location::RegisterLocation(RAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const override { return "ReadBarrierForRootSlowPathX86_64"; }

 private:
  // Destination of the read barrier result.
  const Location out_;
  // The GC root being processed.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86_64);
};
944
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100945#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100946// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
947#define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT
Nicolas Geoffraye5038322014-07-04 09:41:32 +0100948
Roland Levillain4fa13f62015-07-06 18:11:54 +0100949inline Condition X86_64IntegerCondition(IfCondition cond) {
Dave Allison20dfc792014-06-16 20:44:29 -0700950 switch (cond) {
951 case kCondEQ: return kEqual;
952 case kCondNE: return kNotEqual;
953 case kCondLT: return kLess;
954 case kCondLE: return kLessEqual;
955 case kCondGT: return kGreater;
956 case kCondGE: return kGreaterEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700957 case kCondB: return kBelow;
958 case kCondBE: return kBelowEqual;
959 case kCondA: return kAbove;
960 case kCondAE: return kAboveEqual;
Dave Allison20dfc792014-06-16 20:44:29 -0700961 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100962 LOG(FATAL) << "Unreachable";
963 UNREACHABLE();
964}
965
Aart Bike9f37602015-10-09 11:15:55 -0700966// Maps FP condition to x86_64 name.
Roland Levillain4fa13f62015-07-06 18:11:54 +0100967inline Condition X86_64FPCondition(IfCondition cond) {
968 switch (cond) {
969 case kCondEQ: return kEqual;
970 case kCondNE: return kNotEqual;
971 case kCondLT: return kBelow;
972 case kCondLE: return kBelowEqual;
973 case kCondGT: return kAbove;
974 case kCondGE: return kAboveEqual;
Aart Bike9f37602015-10-09 11:15:55 -0700975 default: break; // should not happen
Igor Murashkin2ffb7032017-11-08 13:35:21 -0800976 }
Roland Levillain4fa13f62015-07-06 18:11:54 +0100977 LOG(FATAL) << "Unreachable";
978 UNREACHABLE();
Dave Allison20dfc792014-06-16 20:44:29 -0700979}
980
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    ArtMethod* method ATTRIBUTE_UNUSED) {
  // Every dispatch kind requested by the compiler driver is supported
  // as-is on x86-64, so no fallback adjustment is needed.
  return desired_dispatch_info;
}
986
// Emits the code to materialize the callee ArtMethod (according to the
// invoke's method load kind) and then the call itself (according to the
// invoke's code pointer location), recording PC info for the call.
void CodeGeneratorX86_64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ gs()->movq(temp.AsRegister<CpuRegister>(), Address::Absolute(offset, /* no_rip= */ true));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already in one of the inputs.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
      // PC-relative address of the method; the displacement is patched later.
      __ leal(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageMethodPatch(invoke);
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ movl(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordBootImageRelRoPatch(GetBootImageOffset(invoke));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the method pointer from the .bss entry; the address is patched later.
      __ movq(temp.AsRegister<CpuRegister>(),
              Address::Absolute(kDummy32BitOffset, /* no_rip= */ false));
      RecordMethodBssEntryPatch(invoke);
      // No need for memory fence, thanks to the x86-64 memory model.
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // JIT: the method address is known at compile time.
      Load64BitValue(temp.AsRegister<CpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: jump straight to this method's frame entry.
      __ call(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // (callee_method + offset_of_quick_compiled_code)()
      __ call(Address(callee_method.AsRegister<CpuRegister>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kX86_64PointerSize).SizeValue()));
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
1047
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod from the class's embedded vtable, and calls its quick
// compiled-code entrypoint.
void CodeGeneratorX86_64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  CpuRegister temp = temp_in.AsRegister<CpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();

  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // /* HeapReference<Class> */ temp = receiver->klass_
  __ movl(temp, Address(CpuRegister(receiver), class_offset));
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  MaybeGenerateInlineCacheCheck(invoke, temp);

  // temp = temp->GetMethodAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86_64PointerSize).SizeValue()));
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
1083
Vladimir Marko6fd16062018-06-26 11:02:04 +01001084void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001085 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, intrinsic_data);
1086 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Marko6fd16062018-06-26 11:02:04 +01001087}
1088
Vladimir Markob066d432018-01-03 13:14:37 +00001089void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
Vladimir Marko2d06e022019-07-08 15:45:19 +01001090 boot_image_other_patches_.emplace_back(/* target_dex_file= */ nullptr, boot_image_offset);
1091 __ Bind(&boot_image_other_patches_.back().label);
Vladimir Markob066d432018-01-03 13:14:37 +00001092}
1093
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001094void CodeGeneratorX86_64::RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke) {
1095 boot_image_method_patches_.emplace_back(
1096 invoke->GetTargetMethod().dex_file, invoke->GetTargetMethod().index);
Vladimir Marko65979462017-05-19 17:25:12 +01001097 __ Bind(&boot_image_method_patches_.back().label);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001098}
1099
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001100void CodeGeneratorX86_64::RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke) {
1101 method_bss_entry_patches_.emplace_back(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
1102 __ Bind(&method_bss_entry_patches_.back().label);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001103}
1104
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001105void CodeGeneratorX86_64::RecordBootImageTypePatch(HLoadClass* load_class) {
1106 boot_image_type_patches_.emplace_back(
1107 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001108 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001109}
1110
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001111Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001112 type_bss_entry_patches_.emplace_back(
1113 &load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001114 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001115}
1116
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001117void CodeGeneratorX86_64::RecordBootImageStringPatch(HLoadString* load_string) {
1118 boot_image_string_patches_.emplace_back(
1119 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
1120 __ Bind(&boot_image_string_patches_.back().label);
Vladimir Marko65979462017-05-19 17:25:12 +01001121}
1122
Vladimir Markoaad75c62016-10-03 08:46:48 +00001123Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001124 string_bss_entry_patches_.emplace_back(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001125 &load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001126 return &string_bss_entry_patches_.back().label;
Vladimir Markoaad75c62016-10-03 08:46:48 +00001127}
1128
// Materializes a reference to a boot image entry in `reg`.
// - Compiling the boot image itself: PC-relative leal fixed up at link time
//   (intrinsic reference patch).
// - Other PIC compilation: load the address from the .data.bimg.rel.ro entry
//   (boot image rel.ro patch).
// - Otherwise (JIT, boot image mapped at a known address): embed the absolute
//   32-bit address directly.
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // kDummy32BitOffset is a placeholder; the linker rewrites the displacement.
    __ leal(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageIntrinsicPatch(boot_image_reference);
  } else if (GetCompilerOptions().GetCompilePic()) {
    __ movl(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
    RecordBootImageRelRoPatch(boot_image_reference);
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    // Address of the entry inside the (already mapped) primary boot image space.
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
  }
}
1144
Vladimir Marko6fd16062018-06-26 11:02:04 +01001145void CodeGeneratorX86_64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1146 uint32_t boot_image_offset) {
1147 DCHECK(invoke->IsStatic());
1148 InvokeRuntimeCallingConvention calling_convention;
1149 CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
1150 if (GetCompilerOptions().IsBootImage()) {
1151 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1152 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1153 __ leal(argument,
Andreas Gampe3db70682018-12-26 15:12:03 -08001154 Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
Vladimir Marko6fd16062018-06-26 11:02:04 +01001155 MethodReference target_method = invoke->GetTargetMethod();
1156 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1157 boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
1158 __ Bind(&boot_image_type_patches_.back().label);
1159 } else {
1160 LoadBootImageAddress(argument, boot_image_offset);
1161 }
1162 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1163 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1164}
1165
// The label points to the end of the "movl" or another instruction but the literal offset
// for method patch needs to point to the embedded constant which occupies the last 4 bytes.
constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;

// Converts every recorded PatchInfo in `infos` into a linker patch, using the
// given LinkerPatch factory function, and appends them to `linker_patches`.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PatchInfo<Label>>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PatchInfo<Label>& info : infos) {
    // Back up from the bound label to the start of the embedded 32-bit literal.
    uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
    linker_patches->push_back(
        Factory(literal_offset, info.target_dex_file, info.label.Position(), info.offset_or_index));
  }
}
1180
// Adapts a three-argument LinkerPatch factory (used for patches that carry no
// dex file, i.e. intrinsic references and boot image rel.ro entries) to the
// four-argument factory signature expected by EmitPcRelativeLinkerPatches.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1189
// Flushes all patch records accumulated during code generation into
// `linker_patches`. The emission order below is fixed; the final DCHECK
// verifies every recorded patch was emitted exactly once.
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_other_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
    // Relative patches into the boot image are only valid when producing it.
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    // Other compilation modes must not have recorded any boot image patches.
    DCHECK(boot_image_method_patches_.empty());
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  // "Other" patches are intrinsic references when producing the boot image,
  // and .data.bimg.rel.ro entries otherwise; neither kind carries a dex file.
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_other_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_other_patches_, linker_patches);
  }
  // .bss entry patches are emitted in all compilation modes.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1228
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001229void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001230 stream << Register(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001231}
1232
1233void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001234 stream << FloatRegister(reg);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001235}
1236
Vladimir Markoa0431112018-06-25 09:32:54 +01001237const X86_64InstructionSetFeatures& CodeGeneratorX86_64::GetInstructionSetFeatures() const {
1238 return *GetCompilerOptions().GetInstructionSetFeatures()->AsX86_64InstructionSetFeatures();
1239}
1240
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001241size_t CodeGeneratorX86_64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1242 __ movq(Address(CpuRegister(RSP), stack_index), CpuRegister(reg_id));
1243 return kX86_64WordSize;
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001244}
1245
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001246size_t CodeGeneratorX86_64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1247 __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1248 return kX86_64WordSize;
1249}
1250
1251size_t CodeGeneratorX86_64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001252 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001253 __ movups(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
Aart Bikb13c65b2017-03-21 20:14:07 -07001254 } else {
1255 __ movsd(Address(CpuRegister(RSP), stack_index), XmmRegister(reg_id));
1256 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001257 return GetSlowPathFPWidth();
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01001258}
1259
1260size_t CodeGeneratorX86_64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Aart Bikb13c65b2017-03-21 20:14:07 -07001261 if (GetGraph()->HasSIMD()) {
Aart Bik5576f372017-03-23 16:17:37 -07001262 __ movups(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
Aart Bikb13c65b2017-03-21 20:14:07 -07001263 } else {
1264 __ movsd(XmmRegister(reg_id), Address(CpuRegister(RSP), stack_index));
1265 }
Artem Serov6a0b6572019-07-26 20:38:37 +01001266 return GetSlowPathFPWidth();
Nicolas Geoffray3bca0df2014-09-19 11:01:00 +01001267}
1268
// Calls the given quick runtime entrypoint through the per-thread entrypoint
// table and records a stack map (at `dex_pc`, with `slow_path`) when the
// entrypoint requires one.
void CodeGeneratorX86_64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1279
// Calls a runtime entrypoint (given as a raw thread-relative offset) without
// recording PC info; only valid for entrypoints that do not need a stack map.
void CodeGeneratorX86_64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1286
// Emits the actual runtime call: an indirect call through the entrypoint slot
// at `entry_point_offset` in the Thread object, addressed via the %gs segment.
void CodeGeneratorX86_64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ gs()->call(Address::Absolute(entry_point_offset, /* no_rip= */ true));
}
1290
// x86-64 has no register pairs.
static constexpr int kNumberOfCpuRegisterPairs = 0;
// Use a fake return address register to mimic Quick.
static constexpr Register kFakeReturnRegister = Register(kLastCpuRegister + 1);

// Constructs the x86-64 code generator. The base CodeGenerator receives the
// register counts and the core/FPU callee-save masks (the fake return-address
// register is folded into the core callee-save mask); all patch containers are
// arena-allocated from the graph's allocator.
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfFloatRegisters,
                    kNumberOfCpuRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      constant_area_start_(0),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Reserve the fake return-address register so the allocator never hands it out.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01001326
// Constructs the instruction visitor that emits code for each HInstruction,
// sharing the assembler owned by `codegen`.
InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1332
// Marks registers the register allocator must never assign.
void CodeGeneratorX86_64::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[RSP] = true;

  // Block the register used as TMP.
  blocked_core_registers_[TMP] = true;
}
1340
// Maps an x86-64 core register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86_64Core(static_cast<int>(reg));
}

// Maps an x86-64 floating point register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(FloatRegister reg) {
  return dwarf::Reg::X86_64Fp(static_cast<int>(reg));
}
1348
// Emits hotness-counter updates. Called from the frame entry
// (is_frame_entry == true) and from back edges (is_frame_entry == false).
// Two independent mechanisms:
// 1. --count-hotness-in-compiled-code: bump the ArtMethod hotness counter,
//    saturating at ArtMethod::MaxCounter().
// 2. Baseline (non-AOT) compilation: bump the ProfilingInfo baseline counter
//    and call the CompileOptimized entrypoint when the 16-bit add wraps
//    (carry set), triggering optimized recompilation.
void CodeGeneratorX86_64::MaybeIncrementHotness(bool is_frame_entry) {
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    NearLabel overflow;
    Register method = kMethodRegisterArgument;
    if (!is_frame_entry) {
      // Not at the method entry: the method register may be live with other
      // data, so reload the ArtMethod* from its stack slot into TMP.
      CHECK(RequiresCurrentMethod());
      method = TMP;
      __ movq(CpuRegister(method), Address(CpuRegister(RSP), kCurrentMethodStackOffset));
    }
    // Saturating increment: skip the add once the counter reaches MaxCounter().
    __ cmpw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(ArtMethod::MaxCounter()));
    __ j(kEqual, &overflow);
    __ addw(Address(CpuRegister(method), ArtMethod::HotnessCountOffset().Int32Value()),
            Immediate(1));
    __ Bind(&overflow);
  }

  if (GetGraph()->IsCompilingBaseline() && !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    // The ProfilingInfo address is embedded directly; it is valid for the
    // lifetime of this JIT-compiled code.
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    uint64_t address = reinterpret_cast64<uint64_t>(info);
    NearLabel done;
    __ movq(CpuRegister(TMP), Immediate(address));
    __ addw(Address(CpuRegister(TMP), ProfilingInfo::BaselineHotnessCountOffset().Int32Value()),
            Immediate(1));
    // No carry: counter has not wrapped yet, skip the recompilation request.
    __ j(kCarryClear, &done);
    if (HasEmptyFrame()) {
      CHECK(is_frame_entry);
      // Frame alignment, and the stub expects the method on the stack.
      __ pushq(CpuRegister(RDI));
      __ cfi().AdjustCFAOffset(kX86_64WordSize);
      __ cfi().RelOffset(DWARFReg(RDI), 0);
    } else if (!RequiresCurrentMethod()) {
      CHECK(is_frame_entry);
      // Make the ArtMethod* visible to the stub in its expected stack slot.
      __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
    }
    GenerateInvokeRuntime(
        GetThreadOffset<kX86_64PointerSize>(kQuickCompileOptimized).Int32Value());
    if (HasEmptyFrame()) {
      // Undo the alignment push and restore RDI and the CFI state.
      __ popq(CpuRegister(RDI));
      __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
      __ cfi().Restore(DWARFReg(RDI));
    }
    __ Bind(&done);
  }
}
1395
// Emits the method prologue: stack overflow check, callee-save spills, frame
// allocation, current-method store, should-deoptimize flag init and hotness
// bump. CFI directives are kept in sync with every stack adjustment.
void CodeGeneratorX86_64::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86_64WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());


  if (!skip_overflow_check) {
    // Implicit stack overflow check: touch the page below the stack limit;
    // a fault here is turned into StackOverflowError by the fault handler.
    size_t reserved_bytes = GetStackOverflowReservedBytes(InstructionSet::kX86_64);
    __ testq(CpuRegister(RAX), Address(CpuRegister(RSP), -static_cast<int32_t>(reserved_bytes)));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    // Push allocated core callee-saves in reverse order (popped forward in the exit).
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ pushq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(kX86_64WordSize);
        __ cfi().RelOffset(DWARFReg(reg), 0);
      }
    }

    // Allocate the rest of the frame (everything not covered by the pushes).
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ subq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(adjust);
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();

    // Spill allocated FPU callee-saves into their frame slots.
    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(Address(CpuRegister(RSP), offset), XmmRegister(kFpuCalleeSaves[i]));
        __ cfi().RelOffset(DWARFReg(kFpuCalleeSaves[i]), offset);
      }
    }

    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      CHECK(!HasEmptyFrame());
      __ movq(Address(CpuRegister(RSP), kCurrentMethodStackOffset),
              CpuRegister(kMethodRegisterArgument));
    }

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      CHECK(!HasEmptyFrame());
      // Initialize should_deoptimize flag to 0.
      __ movl(Address(CpuRegister(RSP), GetStackOffsetOfShouldDeoptimizeFlag()), Immediate(0));
    }
  }

  MaybeIncrementHotness(/* is_frame_entry= */ true);
}
1452
// Emits the method epilogue: restore FPU and core callee-saves, release the
// frame and return. CFI state is remembered/restored around the epilogue so
// code emitted after it still sees the in-frame CFI description.
void CodeGeneratorX86_64::GenerateFrameExit() {
  __ cfi().RememberState();
  if (!HasEmptyFrame()) {
    // Reload FPU callee-saves from their frame slots (mirrors the entry spills).
    uint32_t xmm_spill_location = GetFpuSpillStart();
    size_t xmm_spill_slot_size = GetCalleePreservedFPWidth();
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      if (allocated_registers_.ContainsFloatingPointRegister(kFpuCalleeSaves[i])) {
        int offset = xmm_spill_location + (xmm_spill_slot_size * i);
        __ movsd(XmmRegister(kFpuCalleeSaves[i]), Address(CpuRegister(RSP), offset));
        __ cfi().Restore(DWARFReg(kFpuCalleeSaves[i]));
      }
    }

    // Release the non-push part of the frame.
    int adjust = GetFrameSize() - GetCoreSpillSize();
    __ addq(CpuRegister(RSP), Immediate(adjust));
    __ cfi().AdjustCFAOffset(-adjust);

    // Pop core callee-saves in forward order (they were pushed in reverse).
    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      Register reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ popq(CpuRegister(reg));
        __ cfi().AdjustCFAOffset(-static_cast<int>(kX86_64WordSize));
        __ cfi().Restore(DWARFReg(reg));
      }
    }
  }
  __ ret();
  // Restore the CFI description for any code emitted after this epilogue.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1483
// Binds the label of `block` to the current assembler position; jumps to the
// block target this point.
void CodeGeneratorX86_64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1487
// Emits a move between two arbitrary locations (register, FPU register,
// 32-bit stack slot, 64-bit stack slot, or constant source). Stack-to-stack
// moves go through the reserved TMP register. No-op when source == destination.
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    CpuRegister dest = destination.AsRegister<CpuRegister>();
    if (source.IsRegister()) {
      __ movq(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      // FPU -> core register transfer.
      __ movd(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsStackSlot()) {
      // 32-bit load (stack slots are 32-bit; double slots handled below).
      __ movl(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      if (constant->IsLongConstant()) {
        Load64BitValue(dest, constant->AsLongConstant()->GetValue());
      } else {
        Load32BitValue(dest, GetInt32ValueOf(constant));
      }
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
    if (source.IsRegister()) {
      __ movd(dest, source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movaps(dest, source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // Materialize the constant's bit pattern into the XMM register.
      HConstant* constant = source.GetConstant();
      int64_t value = CodeGenerator::GetInt64ValueOf(constant);
      if (constant->IsFloatConstant()) {
        Load32BitValue(dest, static_cast<int32_t>(value));
      } else {
        Load64BitValue(dest, value);
      }
    } else if (source.IsStackSlot()) {
      __ movss(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movsd(dest, Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      // 32-bit constants can be stored to the stack directly as an immediate.
      HConstant* constant = source.GetConstant();
      int32_t value = GetInt32ValueOf(constant);
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: bounce through TMP.
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (source.IsConstant()) {
      HConstant* constant = source.GetConstant();
      DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
      int64_t value = GetInt64ValueOf(constant);
      // 64-bit immediates cannot be stored directly; helper may use TMP.
      Store64BitValueToStack(destination, value);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: bounce through TMP.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}
1567
// Materializes a 32-bit integer constant into the given (register) location.
void CodeGeneratorX86_64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  // Sign-extend to 64 bits; Load64BitValue picks the shortest encoding for the value.
  Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
1572
// Moves `src` to `dst`. On x86-64 the move is fully determined by the two
// locations, so the destination type hint is not needed and is ignored.
void CodeGeneratorX86_64::MoveLocation(
    Location dst, Location src, DataType::Type dst_type ATTRIBUTE_UNUSED) {
  Move(dst, src);
}
1577
// Registers `location` as an extra temporary in `locations`. Only register
// locations are supported; anything else aborts compilation.
void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}
1585
// Emits the code for an unconditional control-flow edge (`HGoto` or the normal
// path of an `HTryBoundary`) to `successor`, inserting suspend checks and
// hotness counting where required.
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    // Only an always-throwing predecessor can jump to the exit block; nothing to emit.
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: bump the hotness counter and emit the loop's suspend
    // check, which also performs the jump to `successor`.
    codegen_->MaybeIncrementHotness(/* is_frame_entry= */ false);
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // The entry block's suspend check is emitted here rather than at its own position.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Elide the jump when the successor is the next block in emission order.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
1609
// HGoto needs no operands, hence no LocationSummary.
void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Code generation for HGoto is shared with HTryBoundary via HandleGoto.
void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
1617
// HTryBoundary needs no operands, hence no LocationSummary.
void LocationsBuilderX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

// A try boundary only emits code for its normal-flow successor; exceptional
// edges are handled by the runtime. A jump to the exit block is never emitted.
void InstructionCodeGeneratorX86_64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
1628
// HExit needs no operands and generates no code: it only marks the end of the graph.
void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
1635
// Emits the jumps that follow a floating-point compare (ucomiss/ucomisd).
// An unordered result (NaN operand) must be routed explicitly, because the
// integer condition codes alone cannot express the Java NaN semantics.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateFPJumps(HCondition* cond,
                                                     LabelType* true_label,
                                                     LabelType* false_label) {
  if (cond->IsFPConditionTrueIfNaN()) {
    __ j(kUnordered, true_label);
  } else if (cond->IsFPConditionFalseIfNaN()) {
    __ j(kUnordered, false_label);
  }
  // Ordered case: jump on the x86-64 condition corresponding to `cond`.
  __ j(X86_64FPCondition(cond->GetCondition()), true_label);
}
1647
// Emits the compare instruction for `condition`, setting EFLAGS without
// materializing a boolean result. The caller emits the consuming jcc/setcc/cmov.
void InstructionCodeGeneratorX86_64::GenerateCompareTest(HCondition* condition) {
  LocationSummary* locations = condition->GetLocations();

  Location left = locations->InAt(0);
  Location right = locations->InAt(1);
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    // All sub-word and 32-bit integral types (and references) use a 32-bit compare.
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      // Unordered compare; right operand may be a register, constant pool entry,
      // or stack slot.
      if (right.IsFpuRegister()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralFloatAddress(
                       right.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(right.IsStackSlot());
        __ ucomiss(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (right.IsFpuRegister()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
      } else if (right.IsConstant()) {
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   codegen_->LiteralDoubleAddress(
                       right.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ ucomisd(left.AsFpuRegister<XmmRegister>(),
                   Address(CpuRegister(RSP), right.GetStackIndex()));
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }
}
1701
// Emits a compare followed by the branch(es) for a long or floating-point
// condition that was folded into the branch (not materialized).
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateCompareTestAndBranch(HCondition* condition,
                                                                  LabelType* true_target_in,
                                                                  LabelType* false_target_in) {
  // Generated branching requires both targets to be explicit. If either of the
  // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
  LabelType fallthrough_target;
  LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
  LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;

  // Generate the comparison to set the CC.
  GenerateCompareTest(condition);

  // Now generate the correct jump(s).
  DataType::Type type = condition->InputAt(0)->GetType();
  switch (type) {
    case DataType::Type::kInt64: {
      // A long compare yields a single usable condition code.
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
      break;
    }
    case DataType::Type::kFloat32: {
      // FP compares need NaN-aware jump sequences.
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    case DataType::Type::kFloat64: {
      GenerateFPJumps(condition, true_target, false_target);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected condition type " << type;
  }

  // If the false target is a real label, close the true path with an explicit jump.
  if (false_target != &fallthrough_target) {
    __ jmp(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
1742
David Brazdil0debae72015-11-12 18:37:00 +00001743static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1744 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1745 // are set only strictly before `branch`. We can't use the eflags on long
1746 // conditions if they are materialized due to the complex branching.
1747 return cond->IsCondition() &&
1748 cond->GetNext() == branch &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001749 !DataType::IsFloatingPointType(cond->InputAt(0)->GetType());
David Brazdil0debae72015-11-12 18:37:00 +00001750}
1751
// Emits the test-and-branch for `instruction` (an HIf, HDeoptimize or HSelect)
// whose boolean input is at `condition_input_index`. Either target may be
// nullptr, meaning that path falls through.
template<class LabelType>
void InstructionCodeGeneratorX86_64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           LabelType* true_target,
                                                           LabelType* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    // Emit at most one unconditional jump.
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ jmp(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ jmp(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (AreEflagsSetFrom(cond, instruction)) {
      // The flags from materializing `cond` are still live: branch on them directly.
      if (true_target == nullptr) {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetOppositeCondition()), false_target);
      } else {
        __ j(X86_64IntegerCondition(cond->AsCondition()->GetCondition()), true_target);
      }
    } else {
      // Materialized condition, compare against 0.
      Location lhs = instruction->GetLocations()->InAt(condition_input_index);
      if (lhs.IsRegister()) {
        __ testl(lhs.AsRegister<CpuRegister>(), lhs.AsRegister<CpuRegister>());
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
      if (true_target == nullptr) {
        __ j(kEqual, false_target);
      } else {
        __ j(kNotEqual, true_target);
      }
    }
  } else {
    // Condition has not been materialized, use its inputs as the
    // comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    // If this is a long or FP comparison that has been folded into
    // the HCondition, generate the comparison directly.
    DataType::Type type = condition->InputAt(0)->GetType();
    if (type == DataType::Type::kInt64 || DataType::IsFloatingPointType(type)) {
      GenerateCompareTestAndBranch(condition, true_target, false_target);
      return;
    }

    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    codegen_->GenerateIntCompare(lhs, rhs);
    if (true_target == nullptr) {
      __ j(X86_64IntegerCondition(condition->GetOppositeCondition()), false_target);
    } else {
      __ j(X86_64IntegerCondition(condition->GetCondition()), true_target);
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ jmp(false_target);
  }
}
1835
// HIf only needs an input location when the condition is a materialized
// boolean value; a folded condition supplies its own operands.
void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::Any());
  }
}

void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  // A nullptr target tells GenerateTestAndBranch that the successor is the
  // fallthrough block, so no jump is needed for that edge.
  Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index= */ 0, true_target, false_target);
}
1852
1853void LocationsBuilderX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001854 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001855 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01001856 InvokeRuntimeCallingConvention calling_convention;
1857 RegisterSet caller_saves = RegisterSet::Empty();
1858 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1859 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00001860 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001861 locations->SetInAt(0, Location::Any());
1862 }
1863}
1864
1865void InstructionCodeGeneratorX86_64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001866 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86_64>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001867 GenerateTestAndBranch<Label>(deoptimize,
Andreas Gampe3db70682018-12-26 15:12:03 -08001868 /* condition_input_index= */ 0,
David Brazdil74eb1b22015-12-14 11:44:01 +00001869 slow_path->GetEntryLabel(),
Andreas Gampe3db70682018-12-26 15:12:03 -08001870 /* false_target= */ nullptr);
David Brazdil74eb1b22015-12-14 11:44:01 +00001871}
1872
void LocationsBuilderX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

// Loads the "should deoptimize" flag from its slot in the current stack frame.
void InstructionCodeGeneratorX86_64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ movl(flag->GetLocations()->Out().AsRegister<CpuRegister>(),
          Address(CpuRegister(RSP), codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}
1883
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001884static bool SelectCanUseCMOV(HSelect* select) {
1885 // There are no conditional move instructions for XMMs.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001886 if (DataType::IsFloatingPointType(select->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001887 return false;
1888 }
1889
1890 // A FP condition doesn't generate the single CC that we need.
1891 HInstruction* condition = select->GetCondition();
1892 if (condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001893 DataType::IsFloatingPointType(condition->InputAt(0)->GetType())) {
Mark Mendell7c0b44f2016-02-01 10:08:35 -05001894 return false;
1895 }
1896
1897 // We can generate a CMOV for this Select.
1898 return true;
1899}
1900
void LocationsBuilderX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  if (DataType::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::Any());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    if (SelectCanUseCMOV(select)) {
      if (select->InputAt(1)->IsConstant()) {
        // CMOV has no immediate form, so a constant true-value must be in a register.
        locations->SetInAt(1, Location::RequiresRegister());
      } else {
        locations->SetInAt(1, Location::Any());
      }
    } else {
      locations->SetInAt(1, Location::Any());
    }
  }
  // The condition input (index 2) is only present when materialized.
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  // The false value (input 0) doubles as the output.
  locations->SetOut(Location::SameAsFirstInput());
}
1923
// Generates code for HSelect: a CMOV when possible, otherwise a test-and-branch
// around a move of the true value.
void InstructionCodeGeneratorX86_64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  if (SelectCanUseCMOV(select)) {
    // If both the condition and the source types are integer, we can generate
    // a CMOV to implement Select.
    CpuRegister value_false = locations->InAt(0).AsRegister<CpuRegister>();
    Location value_true_loc = locations->InAt(1);
    DCHECK(locations->InAt(0).Equals(locations->Out()));

    HInstruction* select_condition = select->GetCondition();
    Condition cond = kNotEqual;

    // Figure out how to test the 'condition'.
    if (select_condition->IsCondition()) {
      HCondition* condition = select_condition->AsCondition();
      if (!condition->IsEmittedAtUseSite()) {
        // This was a previously materialized condition.
        // Can we use the existing condition code?
        if (AreEflagsSetFrom(condition, select)) {
          // Materialization was the previous instruction. Condition codes are right.
          cond = X86_64IntegerCondition(condition->GetCondition());
        } else {
          // No, we have to recreate the condition code.
          CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
          __ testl(cond_reg, cond_reg);
        }
      } else {
        // Condition folded into the select: emit the compare here.
        GenerateCompareTest(condition);
        cond = X86_64IntegerCondition(condition->GetCondition());
      }
    } else {
      // Must be a Boolean condition, which needs to be compared to 0.
      CpuRegister cond_reg = locations->InAt(2).AsRegister<CpuRegister>();
      __ testl(cond_reg, cond_reg);
    }

    // If the condition is true, overwrite the output, which already contains false.
    // Generate the correct sized CMOV.
    bool is_64_bit = DataType::Is64BitType(select->GetType());
    if (value_true_loc.IsRegister()) {
      __ cmov(cond, value_false, value_true_loc.AsRegister<CpuRegister>(), is_64_bit);
    } else {
      __ cmov(cond,
              value_false,
              Address(CpuRegister(RSP), value_true_loc.GetStackIndex()), is_64_bit);
    }
  } else {
    // Fallback: branch over the move of the true value when the condition is false.
    NearLabel false_target;
    GenerateTestAndBranch<NearLabel>(select,
                                     /* condition_input_index= */ 2,
                                     /* true_target= */ nullptr,
                                     &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}
1980
// HNativeDebugInfo carries no operands; it only needs an (empty) LocationSummary.
void LocationsBuilderX86_64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}

void InstructionCodeGeneratorX86_64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

// Emits a single-byte nop, used as a debugger-visible anchor point.
void CodeGeneratorX86_64::GenerateNop() {
  __ nop();
}
1992
// Shared location setup for all HCondition flavors (==, !=, <, <=, >, >=,
// and the unsigned below/above variants).
void LocationsBuilderX86_64::HandleCondition(HCondition* cond) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(cond, LocationSummary::kNoCall);
  // Handle the long/FP comparisons made in instruction simplification.
  switch (cond->InputAt(0)->GetType()) {
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP compares need the left operand in an XMM register.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      break;
    default:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      break;
  }
  // Only conditions materialized into a register need an output.
  if (!cond->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister());
  }
}
2016
// Materializes a condition into its output register (0 or 1). Conditions
// emitted at their use site generate no code here.
void InstructionCodeGeneratorX86_64::HandleCondition(HCondition* cond) {
  if (cond->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = cond->GetLocations();
  Location lhs = locations->InAt(0);
  Location rhs = locations->InAt(1);
  CpuRegister reg = locations->Out().AsRegister<CpuRegister>();
  NearLabel true_label, false_label;

  switch (cond->InputAt(0)->GetType()) {
    default:
      // Integer case.

      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateIntCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kInt64:
      // Clear output register: setcc only sets the low byte.
      __ xorl(reg, reg);

      codegen_->GenerateLongCompare(lhs, rhs);
      __ setcc(X86_64IntegerCondition(cond->GetCondition()), reg);
      return;
    case DataType::Type::kFloat32: {
      // FP compare; NaN handling requires the jump-based sequence below
      // instead of a single setcc.
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        float value = rhs.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(lhs_reg, codegen_->LiteralFloatAddress(value));
      } else if (rhs.IsStackSlot()) {
        __ ucomiss(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomiss(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister lhs_reg = lhs.AsFpuRegister<XmmRegister>();
      if (rhs.IsConstant()) {
        double value = rhs.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(lhs_reg, codegen_->LiteralDoubleAddress(value));
      } else if (rhs.IsDoubleStackSlot()) {
        __ ucomisd(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
      } else {
        __ ucomisd(lhs_reg, rhs.AsFpuRegister<XmmRegister>());
      }
      GenerateFPJumps(cond, &true_label, &false_label);
      break;
    }
  }

  // Convert the jumps into the result.
  NearLabel done_label;

  // False case: result = 0.
  __ Bind(&false_label);
  __ xorl(reg, reg);
  __ jmp(&done_label);

  // True case: result = 1.
  __ Bind(&true_label);
  __ movl(reg, Immediate(1));
  __ Bind(&done_label);
}
2086
// All condition visitors delegate to the shared HandleCondition helpers above;
// the specific comparison kind is recovered from the HCondition node itself.
void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

// Unsigned comparisons (below/above) share the same path; the unsigned
// x86-64 condition code is selected inside HandleCondition.
void LocationsBuilderX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
2166
// Location setup for HCompare (the three-way compare producing -1/0/1).
void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      // The output may share a register with an input (no overlap needed).
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}
2194
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  // Emits the three-way comparison: out = -1 if left < right, 0 if equal,
  // 1 if left > right, with the gt/lt bias deciding where NaN goes.
  LocationSummary* locations = compare->GetLocations();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  NearLabel less, greater, done;
  DataType::Type type = compare->InputAt(0)->GetType();
  // Signed compares test the SF/OF-based kLess; unsigned FP compares
  // override this with kBelow below.
  Condition less_cond = kLess;

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      codegen_->GenerateIntCompare(left, right);
      break;
    }
    case DataType::Type::kInt64: {
      codegen_->GenerateLongCompare(left, right);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        float value = right.GetConstant()->AsFloatConstant()->GetValue();
        __ ucomiss(left_reg, codegen_->LiteralFloatAddress(value));
      } else if (right.IsStackSlot()) {
        __ ucomiss(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomiss(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // An unordered result (NaN operand) goes to +1 or -1 depending on bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister left_reg = left.AsFpuRegister<XmmRegister>();
      if (right.IsConstant()) {
        double value = right.GetConstant()->AsDoubleConstant()->GetValue();
        __ ucomisd(left_reg, codegen_->LiteralDoubleAddress(value));
      } else if (right.IsDoubleStackSlot()) {
        __ ucomisd(left_reg, Address(CpuRegister(RSP), right.GetStackIndex()));
      } else {
        __ ucomisd(left_reg, right.AsFpuRegister<XmmRegister>());
      }
      // An unordered result (NaN operand) goes to +1 or -1 depending on bias.
      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
      less_cond = kBelow;  //  ucomis{s,d} sets CF
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }

  // movl does not touch EFLAGS, so the flags from the compare survive it.
  __ movl(out, Immediate(0));
  __ j(kEqual, &done);
  __ j(less_cond, &less);

  __ Bind(&greater);
  __ movl(out, Immediate(1));
  __ jmp(&done);

  __ Bind(&less);
  __ movl(out, Immediate(-1));

  __ Bind(&done);
}
2264
void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  // Constants do not occupy a register; users materialize them on demand.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2270
void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2274
void LocationsBuilderX86_64::VisitNullConstant(HNullConstant* constant) {
  // Constants do not occupy a register; users materialize them on demand.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2280
void InstructionCodeGeneratorX86_64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2284
void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  // Constants do not occupy a register; users materialize them on demand.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2290
void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2294
void LocationsBuilderX86_64::VisitFloatConstant(HFloatConstant* constant) {
  // Constants do not occupy a register; users materialize them on demand.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2300
void InstructionCodeGeneratorX86_64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2304
void LocationsBuilderX86_64::VisitDoubleConstant(HDoubleConstant* constant) {
  // Constants do not occupy a register; users materialize them on demand.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
2310
void InstructionCodeGeneratorX86_64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2315
void LocationsBuilderX86_64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // No inputs or outputs: the fence only constrains memory ordering.
  constructor_fence->SetLocations(nullptr);
}
2319
void InstructionCodeGeneratorX86_64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is a store-store barrier: publishing the object
  // reference must not be reordered before the field initializations.
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
2324
void LocationsBuilderX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // No inputs or outputs: the barrier only constrains memory ordering.
  memory_barrier->SetLocations(nullptr);
}
2328
void InstructionCodeGeneratorX86_64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind carried by the HIR node (e.g. load-any, any-store).
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
2332
void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  // Nothing to return, so no locations are needed.
  ret->SetLocations(nullptr);
}
2336
void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
}
2340
void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  // Pin the returned value to the ABI return register: RAX for
  // core/reference types, XMM0 for floating-point types.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
}
2365
void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  // The register allocator already placed the value in the return register
  // (see the matching LocationsBuilder); here we only verify and, for OSR,
  // mirror FP results into RAX before exiting the frame.
  switch (ret->InputAt(0)->GetType()) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<CpuRegister>().AsRegister(), RAX);
      break;

    case DataType::Type::kFloat32: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ false);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>().AsFloatRegister(),
                XMM0);
      // To simplify callers of an OSR method, we put the return value in both
      // floating point and core register.
      if (GetGraph()->IsCompilingOsr()) {
        __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ true);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected return type " << ret->InputAt(0)->GetType();
  }
  codegen_->GenerateFrameExit();
}
2405
Location InvokeDexCallingConventionVisitorX86_64::GetReturnLocation(DataType::Type type) const {
  // Maps a return type to its location under the dex calling convention:
  // RAX for core/reference types, XMM0 for floating point, none for void.
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kUint32:
    case DataType::Type::kInt32:
    case DataType::Type::kUint64:
    case DataType::Type::kInt64:
      return Location::RegisterLocation(RAX);

    case DataType::Type::kVoid:
      return Location::NoLocation();

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32:
      return Location::FpuRegisterLocation(XMM0);
  }

  // The switch above is exhaustive over DataType::Type.
  UNREACHABLE();
}
2430
Location InvokeDexCallingConventionVisitorX86_64::GetMethodLocation() const {
  // The callee ArtMethod* is always passed in the fixed method register.
  return Location::RegisterLocation(kMethodRegisterArgument);
}
2434
Location InvokeDexCallingConventionVisitorX86_64::GetNextLocation(DataType::Type type) {
  // Assigns the location of the next argument, advancing the visitor's
  // gp/float/stack counters. Core types consume GP registers, FP types
  // consume XMM registers; once registers run out, arguments go on the
  // stack (64-bit types take two stack slots).
  switch (type) {
    case DataType::Type::kReference:
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kInt64: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // A 64-bit value fits in a single 64-bit register...
        gp_index_ += 1;
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        // ...but takes two slots in the (dex-convention) stack accounting.
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kFloat32: {
      uint32_t index = float_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case DataType::Type::kFloat64: {
      uint32_t index = float_index_++;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      UNREACHABLE();
  }
  return Location::NoLocation();
}
2493
void LocationsBuilderX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2500
void InstructionCodeGeneratorX86_64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Delegate to the shared runtime-call path for unresolved invokes.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2504
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // If the call is a recognized intrinsic, let the intrinsic builder set up
  // its (typically tighter) locations instead of the generic invoke ones.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2517
Andreas Gampe71fb52f2014-12-29 17:43:08 -08002518static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86_64* codegen) {
2519 if (invoke->GetLocations()->Intrinsified()) {
2520 IntrinsicCodeGeneratorX86_64 intrinsic(codegen);
2521 intrinsic.Dispatch(invoke);
2522 return true;
2523 }
2524 return false;
2525}
2526
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // Intrinsified invokes emit specialized code instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(
      invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
}
2540
void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  // Shared location setup for all invoke kinds, driven by the dex calling
  // convention visitor for this architecture.
  InvokeDexCallingConventionVisitorX86_64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2545
void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // If the call is a recognized intrinsic, let the intrinsic builder set up
  // its locations instead of the generic invoke ones.
  IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
2554
void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Intrinsified invokes emit specialized code instead of a call.
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
}
2563
void LocationsBuilderX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method index, passed in RAX).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(RAX));
}
2569
void CodeGeneratorX86_64::MaybeGenerateInlineCacheCheck(HInstruction* instruction,
                                                        CpuRegister klass) {
  // For baseline JIT compilation, records the receiver class of this call
  // site in the method's InlineCache so later tiered compilation can
  // devirtualize. `klass` must already hold the receiver's class.
  DCHECK_EQ(RDI, klass.AsRegister());
  // We know the destination of an intrinsic, so no need to record inline
  // caches.
  if (!instruction->GetLocations()->Intrinsified() &&
      GetGraph()->IsCompilingBaseline() &&
      !Runtime::Current()->IsAotCompiler()) {
    ScopedObjectAccess soa(Thread::Current());
    ProfilingInfo* info = GetGraph()->GetArtMethod()->GetProfilingInfo(kRuntimePointerSize);
    InlineCache* cache = info->GetInlineCache(instruction->GetDexPc());
    uint64_t address = reinterpret_cast64<uint64_t>(cache);
    NearLabel done;
    __ movq(CpuRegister(TMP), Immediate(address));
    // Fast path for a monomorphic cache: skip the runtime update when the
    // cache already records this exact class.
    __ cmpl(Address(CpuRegister(TMP), InlineCache::ClassesOffset().Int32Value()), klass);
    __ j(kEqual, &done);
    // Slow path: let the runtime update the inline cache.
    GenerateInvokeRuntime(
        GetThreadOffset<kX86_64PointerSize>(kQuickUpdateInlineCache).Int32Value());
    __ Bind(&done);
  }
}
2592
void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister hidden_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();

  // Load the receiver's class into `temp`.
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    // /* HeapReference<Class> */ temp = temp->klass_
    __ movl(temp, Address(temp, class_offset));
  } else {
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);

  codegen_->MaybeGenerateInlineCacheCheck(invoke, temp);

  // Set the hidden argument. This is safe to do here, as RAX
  // won't be modified thereafter, before the `call` instruction.
  // We also do it after MaybeGenerateInlineCacheCheck, which may use RAX.
  DCHECK_EQ(RAX, hidden_reg.AsRegister());
  codegen_->Load64BitValue(hidden_reg, invoke->GetDexMethodIndex());

  // temp = temp->GetAddressOfIMT()
  __ movq(temp,
          Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
  // Compute the offset of this method's IMT slot.
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kX86_64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ movq(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86_64PointerSize).SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2642
void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Uses the generic invoke location setup.
  HandleInvoke(invoke);
}
2646
void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the shared code generator helper.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
2650
void LocationsBuilderX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  // Uses the generic invoke location setup.
  HandleInvoke(invoke);
}
2654
void InstructionCodeGeneratorX86_64::VisitInvokeCustom(HInvokeCustom* invoke) {
  // Delegate to the shared code generator helper.
  codegen_->GenerateInvokeCustomCall(invoke);
}
2658
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
  // Negation is done in place (output shares the input register). FP
  // negation additionally needs a temp XMM register for the sign-bit mask.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      locations->AddTemp(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2680
void InstructionCodeGeneratorX86_64::VisitNeg(HNeg* neg) {
  // Integer negation uses neg{l,q}; floating-point negation flips the sign
  // bit with an xor against a constant mask (there is no FP neg instruction).
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negq(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kFloat32: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement float negation with an exclusive or with value
      // 0x80000000 (mask for bit 31, representing the sign of a
      // single-precision floating-point number).
      __ movss(mask, codegen_->LiteralInt32Address(0x80000000));
      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(in.Equals(out));
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Implement double negation with an exclusive or with value
      // 0x8000000000000000 (mask for bit 63, representing the sign of
      // a double-precision floating-point number).
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000)));
      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
2724
// Sets up register-allocation constraints for a type conversion. Each
// (result_type, input_type) pair here must stay in sync with the matching
// case in InstructionCodeGeneratorX86_64::VisitTypeConversion, which emits
// the actual code under these constraints.
void LocationsBuilderX86_64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();
  // Implicit (no-op) conversions are expected to be removed before codegen.
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  switch (result_type) {
    // Narrowing integral conversions: movzx/movsx can take a register, a
    // stack slot, or a constant, so the input may live anywhere.
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      DCHECK(DataType::IsIntegralType(input_type)) << input_type;
      locations->SetInAt(0, Location::Any());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kInt32:
      switch (input_type) {
        case DataType::Type::kInt64:
          // long-to-int is a plain movl from any location.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case DataType::Type::kFloat32:
          // float-to-int uses comiss/cvttss2si, which need an XMM input.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          // double-to-int uses comisd/cvttsd2si, which need an XMM input.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kInt64:
      switch (input_type) {
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          // TODO: We would benefit from a (to-be-implemented)
          // Location::RegisterOrStackSlot requirement for this input.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat32:
          // float-to-long uses comiss/cvttss2si (64-bit form).
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        case DataType::Type::kFloat64:
          // double-to-long uses comisd/cvttsd2si (64-bit form).
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat32:
      switch (input_type) {
        // cvtsi2ss/cvtsd2ss accept register, memory, or constant inputs,
        // hence Location::Any() throughout.
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case DataType::Type::kFloat64:
      switch (input_type) {
        // cvtsi2sd/cvtss2sd accept register, memory, or constant inputs,
        // hence Location::Any() throughout.
        case DataType::Type::kBool:
        case DataType::Type::kUint8:
        case DataType::Type::kInt8:
        case DataType::Type::kUint16:
        case DataType::Type::kInt16:
        case DataType::Type::kInt32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kInt64:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case DataType::Type::kFloat32:
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
2857
2858void InstructionCodeGeneratorX86_64::VisitTypeConversion(HTypeConversion* conversion) {
2859 LocationSummary* locations = conversion->GetLocations();
2860 Location out = locations->Out();
2861 Location in = locations->InAt(0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002862 DataType::Type result_type = conversion->GetResultType();
2863 DataType::Type input_type = conversion->GetInputType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002864 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
2865 << input_type << " -> " << result_type;
Roland Levillaindff1f282014-11-05 14:15:05 +00002866 switch (result_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002867 case DataType::Type::kUint8:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002868 switch (input_type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002869 case DataType::Type::kInt8:
2870 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002871 case DataType::Type::kInt16:
2872 case DataType::Type::kInt32:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002873 case DataType::Type::kInt64:
2874 if (in.IsRegister()) {
2875 __ movzxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2876 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2877 __ movzxb(out.AsRegister<CpuRegister>(),
2878 Address(CpuRegister(RSP), in.GetStackIndex()));
2879 } else {
2880 __ movl(out.AsRegister<CpuRegister>(),
2881 Immediate(static_cast<uint8_t>(Int64FromConstant(in.GetConstant()))));
2882 }
2883 break;
2884
2885 default:
2886 LOG(FATAL) << "Unexpected type conversion from " << input_type
2887 << " to " << result_type;
2888 }
2889 break;
2890
2891 case DataType::Type::kInt8:
2892 switch (input_type) {
2893 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002894 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002895 case DataType::Type::kInt16:
2896 case DataType::Type::kInt32:
2897 case DataType::Type::kInt64:
Roland Levillain51d3fc42014-11-13 14:11:42 +00002898 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002899 __ movsxb(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002900 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002901 __ movsxb(out.AsRegister<CpuRegister>(),
Roland Levillain51d3fc42014-11-13 14:11:42 +00002902 Address(CpuRegister(RSP), in.GetStackIndex()));
2903 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002904 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002905 Immediate(static_cast<int8_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain51d3fc42014-11-13 14:11:42 +00002906 }
2907 break;
2908
2909 default:
2910 LOG(FATAL) << "Unexpected type conversion from " << input_type
2911 << " to " << result_type;
2912 }
2913 break;
2914
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002915 case DataType::Type::kUint16:
2916 switch (input_type) {
2917 case DataType::Type::kInt8:
2918 case DataType::Type::kInt16:
2919 case DataType::Type::kInt32:
2920 case DataType::Type::kInt64:
2921 if (in.IsRegister()) {
2922 __ movzxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
2923 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
2924 __ movzxw(out.AsRegister<CpuRegister>(),
2925 Address(CpuRegister(RSP), in.GetStackIndex()));
2926 } else {
2927 __ movl(out.AsRegister<CpuRegister>(),
2928 Immediate(static_cast<uint16_t>(Int64FromConstant(in.GetConstant()))));
2929 }
2930 break;
2931
2932 default:
2933 LOG(FATAL) << "Unexpected type conversion from " << input_type
2934 << " to " << result_type;
2935 }
2936 break;
2937
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002938 case DataType::Type::kInt16:
Roland Levillain01a8d712014-11-14 16:27:39 +00002939 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002940 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002941 case DataType::Type::kInt32:
2942 case DataType::Type::kInt64:
Roland Levillain01a8d712014-11-14 16:27:39 +00002943 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002944 __ movsxw(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Vladimir Markob52bbde2016-02-12 12:06:05 +00002945 } else if (in.IsStackSlot() || in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002946 __ movsxw(out.AsRegister<CpuRegister>(),
Roland Levillain01a8d712014-11-14 16:27:39 +00002947 Address(CpuRegister(RSP), in.GetStackIndex()));
2948 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002949 __ movl(out.AsRegister<CpuRegister>(),
Vladimir Markob52bbde2016-02-12 12:06:05 +00002950 Immediate(static_cast<int16_t>(Int64FromConstant(in.GetConstant()))));
Roland Levillain01a8d712014-11-14 16:27:39 +00002951 }
2952 break;
2953
2954 default:
2955 LOG(FATAL) << "Unexpected type conversion from " << input_type
2956 << " to " << result_type;
2957 }
2958 break;
2959
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002960 case DataType::Type::kInt32:
Roland Levillain946e1432014-11-11 17:35:19 +00002961 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002962 case DataType::Type::kInt64:
Roland Levillain946e1432014-11-11 17:35:19 +00002963 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002964 __ movl(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillain946e1432014-11-11 17:35:19 +00002965 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002966 __ movl(out.AsRegister<CpuRegister>(),
Roland Levillain946e1432014-11-11 17:35:19 +00002967 Address(CpuRegister(RSP), in.GetStackIndex()));
2968 } else {
2969 DCHECK(in.IsConstant());
2970 DCHECK(in.GetConstant()->IsLongConstant());
2971 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002972 __ movl(out.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002973 }
2974 break;
2975
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002976 case DataType::Type::kFloat32: {
Roland Levillain3f8f9362014-12-02 17:45:01 +00002977 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2978 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002979 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002980
2981 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04002982 // if input >= (float)INT_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07002983 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimIntMax)));
Roland Levillain3f8f9362014-12-02 17:45:01 +00002984 __ j(kAboveEqual, &done);
2985 // if input == NaN goto nan
2986 __ j(kUnordered, &nan);
2987 // output = float-to-int-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00002988 __ cvttss2si(output, input, false);
Roland Levillain3f8f9362014-12-02 17:45:01 +00002989 __ jmp(&done);
2990 __ Bind(&nan);
2991 // output = 0
2992 __ xorl(output, output);
2993 __ Bind(&done);
2994 break;
2995 }
2996
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002997 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002998 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2999 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003000 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003001
3002 __ movl(output, Immediate(kPrimIntMax));
Mark Mendellcfa410b2015-05-25 16:02:44 -04003003 // if input >= (double)INT_MAX goto done
3004 __ comisd(input, codegen_->LiteralDoubleAddress(kPrimIntMax));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003005 __ j(kAboveEqual, &done);
3006 // if input == NaN goto nan
3007 __ j(kUnordered, &nan);
3008 // output = double-to-int-truncate(input)
3009 __ cvttsd2si(output, input);
3010 __ jmp(&done);
3011 __ Bind(&nan);
3012 // output = 0
3013 __ xorl(output, output);
3014 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00003015 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003016 }
Roland Levillain946e1432014-11-11 17:35:19 +00003017
3018 default:
3019 LOG(FATAL) << "Unexpected type conversion from " << input_type
3020 << " to " << result_type;
3021 }
3022 break;
3023
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003024 case DataType::Type::kInt64:
Roland Levillaindff1f282014-11-05 14:15:05 +00003025 switch (input_type) {
3026 DCHECK(out.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003027 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003028 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003029 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003030 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003031 case DataType::Type::kInt16:
3032 case DataType::Type::kInt32:
Roland Levillaindff1f282014-11-05 14:15:05 +00003033 DCHECK(in.IsRegister());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003034 __ movsxd(out.AsRegister<CpuRegister>(), in.AsRegister<CpuRegister>());
Roland Levillaindff1f282014-11-05 14:15:05 +00003035 break;
3036
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003037 case DataType::Type::kFloat32: {
Roland Levillain624279f2014-12-04 11:54:28 +00003038 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3039 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003040 NearLabel done, nan;
Roland Levillain624279f2014-12-04 11:54:28 +00003041
Mark Mendell92e83bf2015-05-07 11:25:03 -04003042 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003043 // if input >= (float)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003044 __ comiss(input, codegen_->LiteralFloatAddress(static_cast<float>(kPrimLongMax)));
Roland Levillain624279f2014-12-04 11:54:28 +00003045 __ j(kAboveEqual, &done);
3046 // if input == NaN goto nan
3047 __ j(kUnordered, &nan);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003048 // output = float-to-long-truncate(input)
Roland Levillain624279f2014-12-04 11:54:28 +00003049 __ cvttss2si(output, input, true);
3050 __ jmp(&done);
3051 __ Bind(&nan);
3052 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003053 __ xorl(output, output);
Roland Levillain624279f2014-12-04 11:54:28 +00003054 __ Bind(&done);
3055 break;
3056 }
3057
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003058 case DataType::Type::kFloat64: {
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003059 XmmRegister input = in.AsFpuRegister<XmmRegister>();
3060 CpuRegister output = out.AsRegister<CpuRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04003061 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003062
Mark Mendell92e83bf2015-05-07 11:25:03 -04003063 codegen_->Load64BitValue(output, kPrimLongMax);
Mark Mendellcfa410b2015-05-25 16:02:44 -04003064 // if input >= (double)LONG_MAX goto done
Nick Desaulniers98e97c62019-10-18 14:25:19 -07003065 __ comisd(input, codegen_->LiteralDoubleAddress(
3066 static_cast<double>(kPrimLongMax)));
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003067 __ j(kAboveEqual, &done);
3068 // if input == NaN goto nan
3069 __ j(kUnordered, &nan);
3070 // output = double-to-long-truncate(input)
3071 __ cvttsd2si(output, input, true);
3072 __ jmp(&done);
3073 __ Bind(&nan);
3074 // output = 0
Mark Mendell92e83bf2015-05-07 11:25:03 -04003075 __ xorl(output, output);
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003076 __ Bind(&done);
Roland Levillaindff1f282014-11-05 14:15:05 +00003077 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00003078 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003079
3080 default:
3081 LOG(FATAL) << "Unexpected type conversion from " << input_type
3082 << " to " << result_type;
3083 }
3084 break;
3085
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003086 case DataType::Type::kFloat32:
Roland Levillaincff13742014-11-17 14:32:17 +00003087 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003088 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003089 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003090 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003091 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003092 case DataType::Type::kInt16:
3093 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003094 if (in.IsRegister()) {
3095 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3096 } else if (in.IsConstant()) {
3097 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3098 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003099 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003100 } else {
3101 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3102 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3103 }
Roland Levillaincff13742014-11-17 14:32:17 +00003104 break;
3105
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003106 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003107 if (in.IsRegister()) {
3108 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3109 } else if (in.IsConstant()) {
3110 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3111 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Pavel Vyssotski4c858cd2016-03-16 13:59:53 +06003112 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003113 } else {
3114 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(),
3115 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3116 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00003117 break;
3118
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003119 case DataType::Type::kFloat64:
Mark Mendell40741f32015-04-20 22:10:34 -04003120 if (in.IsFpuRegister()) {
3121 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3122 } else if (in.IsConstant()) {
3123 double v = in.GetConstant()->AsDoubleConstant()->GetValue();
3124 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003125 codegen_->Load32BitValue(dest, static_cast<float>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003126 } else {
3127 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(),
3128 Address(CpuRegister(RSP), in.GetStackIndex()));
3129 }
Roland Levillaincff13742014-11-17 14:32:17 +00003130 break;
3131
3132 default:
3133 LOG(FATAL) << "Unexpected type conversion from " << input_type
3134 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003135 }
Roland Levillaincff13742014-11-17 14:32:17 +00003136 break;
3137
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003138 case DataType::Type::kFloat64:
Roland Levillaincff13742014-11-17 14:32:17 +00003139 switch (input_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003140 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003141 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003142 case DataType::Type::kInt8:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003143 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003144 case DataType::Type::kInt16:
3145 case DataType::Type::kInt32:
Mark Mendell40741f32015-04-20 22:10:34 -04003146 if (in.IsRegister()) {
3147 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), false);
3148 } else if (in.IsConstant()) {
3149 int32_t v = in.GetConstant()->AsIntConstant()->GetValue();
3150 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003151 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003152 } else {
3153 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3154 Address(CpuRegister(RSP), in.GetStackIndex()), false);
3155 }
Roland Levillaincff13742014-11-17 14:32:17 +00003156 break;
3157
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003158 case DataType::Type::kInt64:
Mark Mendell40741f32015-04-20 22:10:34 -04003159 if (in.IsRegister()) {
3160 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<CpuRegister>(), true);
3161 } else if (in.IsConstant()) {
3162 int64_t v = in.GetConstant()->AsLongConstant()->GetValue();
3163 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003164 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003165 } else {
3166 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(),
3167 Address(CpuRegister(RSP), in.GetStackIndex()), true);
3168 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00003169 break;
3170
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003171 case DataType::Type::kFloat32:
Mark Mendell40741f32015-04-20 22:10:34 -04003172 if (in.IsFpuRegister()) {
3173 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
3174 } else if (in.IsConstant()) {
3175 float v = in.GetConstant()->AsFloatConstant()->GetValue();
3176 XmmRegister dest = out.AsFpuRegister<XmmRegister>();
Mark Mendell7c0b44f2016-02-01 10:08:35 -05003177 codegen_->Load64BitValue(dest, static_cast<double>(v));
Mark Mendell40741f32015-04-20 22:10:34 -04003178 } else {
3179 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(),
3180 Address(CpuRegister(RSP), in.GetStackIndex()));
3181 }
Roland Levillaincff13742014-11-17 14:32:17 +00003182 break;
3183
3184 default:
3185 LOG(FATAL) << "Unexpected type conversion from " << input_type
3186 << " to " << result_type;
Igor Murashkin2ffb7032017-11-08 13:35:21 -08003187 }
Roland Levillaindff1f282014-11-05 14:15:05 +00003188 break;
3189
3190 default:
3191 LOG(FATAL) << "Unexpected type conversion from " << input_type
3192 << " to " << result_type;
3193 }
3194}
3195
// Sets up register-allocation constraints for HAdd. Must stay in sync with
// InstructionCodeGeneratorX86_64::VisitAdd, which emits code under these
// constraints.
void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case DataType::Type::kInt32: {
      // addl/leal: first operand in a register; second may be a register,
      // any int constant, or (fallback) a stack slot.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // We can use a leaq or addq if the constant can fit in an immediate.
      locations->SetInAt(1, Location::RegisterOrInt32Constant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case DataType::Type::kFloat64:
    case DataType::Type::kFloat32: {
      // addss/addsd are two-operand: the result overwrites the first input.
      // The second input may be a register, a literal-pool constant, or a
      // stack slot.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
3227
3228void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
3229 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003230 Location first = locations->InAt(0);
3231 Location second = locations->InAt(1);
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003232 Location out = locations->Out();
Calin Juravle11351682014-10-23 15:38:15 +01003233
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003234 switch (add->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003235 case DataType::Type::kInt32: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003236 if (second.IsRegister()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003237 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3238 __ addl(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003239 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3240 __ addl(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003241 } else {
3242 __ leal(out.AsRegister<CpuRegister>(), Address(
3243 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3244 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003245 } else if (second.IsConstant()) {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003246 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3247 __ addl(out.AsRegister<CpuRegister>(),
3248 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3249 } else {
3250 __ leal(out.AsRegister<CpuRegister>(), Address(
3251 first.AsRegister<CpuRegister>(), second.GetConstant()->AsIntConstant()->GetValue()));
3252 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003253 } else {
Nicolas Geoffray748f1402015-01-27 08:17:54 +00003254 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003255 __ addl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003256 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003257 break;
3258 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003259
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003260 case DataType::Type::kInt64: {
Mark Mendell09b84632015-02-13 17:48:38 -05003261 if (second.IsRegister()) {
3262 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3263 __ addq(out.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Mark Mendell33bf2452015-05-27 10:08:24 -04003264 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
3265 __ addq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>());
Mark Mendell09b84632015-02-13 17:48:38 -05003266 } else {
3267 __ leaq(out.AsRegister<CpuRegister>(), Address(
3268 first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>(), TIMES_1, 0));
3269 }
3270 } else {
3271 DCHECK(second.IsConstant());
3272 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3273 int32_t int32_value = Low32Bits(value);
3274 DCHECK_EQ(int32_value, value);
3275 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3276 __ addq(out.AsRegister<CpuRegister>(), Immediate(int32_value));
3277 } else {
3278 __ leaq(out.AsRegister<CpuRegister>(), Address(
3279 first.AsRegister<CpuRegister>(), int32_value));
3280 }
3281 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003282 break;
3283 }
3284
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003285 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003286 if (second.IsFpuRegister()) {
3287 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3288 } else if (second.IsConstant()) {
3289 __ addss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003290 codegen_->LiteralFloatAddress(
3291 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003292 } else {
3293 DCHECK(second.IsStackSlot());
3294 __ addss(first.AsFpuRegister<XmmRegister>(),
3295 Address(CpuRegister(RSP), second.GetStackIndex()));
3296 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003297 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003298 }
3299
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003300 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003301 if (second.IsFpuRegister()) {
3302 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3303 } else if (second.IsConstant()) {
3304 __ addsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003305 codegen_->LiteralDoubleAddress(
3306 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003307 } else {
3308 DCHECK(second.IsDoubleStackSlot());
3309 __ addsd(first.AsFpuRegister<XmmRegister>(),
3310 Address(CpuRegister(RSP), second.GetStackIndex()));
3311 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003312 break;
3313 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003314
3315 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003316 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003317 }
3318}
3319
3320void LocationsBuilderX86_64::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003321 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003322 new (GetGraph()->GetAllocator()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003323 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003324 case DataType::Type::kInt32: {
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003325 locations->SetInAt(0, Location::RequiresRegister());
3326 locations->SetInAt(1, Location::Any());
3327 locations->SetOut(Location::SameAsFirstInput());
3328 break;
3329 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003330 case DataType::Type::kInt64: {
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003331 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04003332 locations->SetInAt(1, Location::RegisterOrInt32Constant(sub->InputAt(1)));
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003333 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003334 break;
3335 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003336 case DataType::Type::kFloat32:
3337 case DataType::Type::kFloat64: {
Calin Juravle11351682014-10-23 15:38:15 +01003338 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003339 locations->SetInAt(1, Location::Any());
Calin Juravle11351682014-10-23 15:38:15 +01003340 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003341 break;
Calin Juravle11351682014-10-23 15:38:15 +01003342 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003343 default:
Calin Juravle11351682014-10-23 15:38:15 +01003344 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003345 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003346}
3347
3348void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
3349 LocationSummary* locations = sub->GetLocations();
Calin Juravle11351682014-10-23 15:38:15 +01003350 Location first = locations->InAt(0);
3351 Location second = locations->InAt(1);
3352 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003353 switch (sub->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003354 case DataType::Type::kInt32: {
Calin Juravle11351682014-10-23 15:38:15 +01003355 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003356 __ subl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
Calin Juravle11351682014-10-23 15:38:15 +01003357 } else if (second.IsConstant()) {
3358 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
Roland Levillain271ab9c2014-11-27 15:23:57 +00003359 __ subl(first.AsRegister<CpuRegister>(), imm);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003360 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003361 __ subl(first.AsRegister<CpuRegister>(), Address(CpuRegister(RSP), second.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01003362 }
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00003363 break;
3364 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003365 case DataType::Type::kInt64: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003366 if (second.IsConstant()) {
3367 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3368 DCHECK(IsInt<32>(value));
3369 __ subq(first.AsRegister<CpuRegister>(), Immediate(static_cast<int32_t>(value)));
3370 } else {
3371 __ subq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
3372 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003373 break;
3374 }
3375
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003376 case DataType::Type::kFloat32: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003377 if (second.IsFpuRegister()) {
3378 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3379 } else if (second.IsConstant()) {
3380 __ subss(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003381 codegen_->LiteralFloatAddress(
3382 second.GetConstant()->AsFloatConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003383 } else {
3384 DCHECK(second.IsStackSlot());
3385 __ subss(first.AsFpuRegister<XmmRegister>(),
3386 Address(CpuRegister(RSP), second.GetStackIndex()));
3387 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003388 break;
Calin Juravle11351682014-10-23 15:38:15 +01003389 }
3390
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003391 case DataType::Type::kFloat64: {
Mark Mendellf55c3e02015-03-26 21:07:46 -04003392 if (second.IsFpuRegister()) {
3393 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3394 } else if (second.IsConstant()) {
3395 __ subsd(first.AsFpuRegister<XmmRegister>(),
Roland Levillain1e7f8db2015-12-15 10:54:19 +00003396 codegen_->LiteralDoubleAddress(
3397 second.GetConstant()->AsDoubleConstant()->GetValue()));
Mark Mendellf55c3e02015-03-26 21:07:46 -04003398 } else {
3399 DCHECK(second.IsDoubleStackSlot());
3400 __ subsd(first.AsFpuRegister<XmmRegister>(),
3401 Address(CpuRegister(RSP), second.GetStackIndex()));
3402 }
Calin Juravle11351682014-10-23 15:38:15 +01003403 break;
3404 }
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003405
3406 default:
Calin Juravle11351682014-10-23 15:38:15 +01003407 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01003408 }
3409}
3410
Calin Juravle34bacdf2014-10-07 20:23:36 +01003411void LocationsBuilderX86_64::VisitMul(HMul* mul) {
3412 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003413 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Calin Juravle34bacdf2014-10-07 20:23:36 +01003414 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003415 case DataType::Type::kInt32: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003416 locations->SetInAt(0, Location::RequiresRegister());
3417 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003418 if (mul->InputAt(1)->IsIntConstant()) {
3419 // Can use 3 operand multiply.
3420 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3421 } else {
3422 locations->SetOut(Location::SameAsFirstInput());
3423 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003424 break;
3425 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003426 case DataType::Type::kInt64: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003427 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003428 locations->SetInAt(1, Location::Any());
3429 if (mul->InputAt(1)->IsLongConstant() &&
3430 IsInt<32>(mul->InputAt(1)->AsLongConstant()->GetValue())) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04003431 // Can use 3 operand multiply.
3432 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3433 } else {
3434 locations->SetOut(Location::SameAsFirstInput());
3435 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003436 break;
3437 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003438 case DataType::Type::kFloat32:
3439 case DataType::Type::kFloat64: {
Calin Juravleb5bfa962014-10-21 18:02:24 +01003440 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendellf55c3e02015-03-26 21:07:46 -04003441 locations->SetInAt(1, Location::Any());
Calin Juravleb5bfa962014-10-21 18:02:24 +01003442 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003443 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003444 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003445
3446 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003447 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003448 }
3449}
3450
void InstructionCodeGeneratorX86_64::VisitMul(HMul* mul) {
  // Emits code for an HMul. For integral types with a constant operand the
  // three-operand form of imul is used, so the output register may differ from
  // the first input; in all other cases the result is computed in place.
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsIntConstant()) {
        Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
        __ imull(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), imm);
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imull(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(first.Equals(out));
        DCHECK(second.IsStackSlot());
        // imull can read the second operand directly from the stack slot.
        __ imull(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    case DataType::Type::kInt64: {
      // The constant may have ended up in a register, so test explicitly to avoid
      // problems where the output may not be the same as the first operand.
      if (mul->InputAt(1)->IsLongConstant()) {
        int64_t value = mul->InputAt(1)->AsLongConstant()->GetValue();
        if (IsInt<32>(value)) {
          // Three-operand imulq only takes a 32-bit immediate.
          __ imulq(out.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(),
                   Immediate(static_cast<int32_t>(value)));
        } else {
          // Have to use the constant area.
          DCHECK(first.Equals(out));
          __ imulq(first.AsRegister<CpuRegister>(), codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsRegister()) {
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(second.IsDoubleStackSlot());
        DCHECK(first.Equals(out));
        __ imulq(first.AsRegister<CpuRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat32: {
      DCHECK(first.Equals(out));
      // The second operand may be a register, a constant-area literal, or a
      // stack slot; mulss takes a memory operand for the latter two.
      if (second.IsFpuRegister()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        DCHECK(second.IsStackSlot());
        __ mulss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      DCHECK(first.Equals(out));
      // Same operand handling as kFloat32, with the double-precision mulsd.
      if (second.IsFpuRegister()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        DCHECK(second.IsDoubleStackSlot());
        __ mulsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
3534
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003535void InstructionCodeGeneratorX86_64::PushOntoFPStack(Location source, uint32_t temp_offset,
3536 uint32_t stack_adjustment, bool is_float) {
3537 if (source.IsStackSlot()) {
3538 DCHECK(is_float);
3539 __ flds(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3540 } else if (source.IsDoubleStackSlot()) {
3541 DCHECK(!is_float);
3542 __ fldl(Address(CpuRegister(RSP), source.GetStackIndex() + stack_adjustment));
3543 } else {
3544 // Write the value to the temporary location on the stack and load to FP stack.
3545 if (is_float) {
3546 Location stack_temp = Location::StackSlot(temp_offset);
3547 codegen_->Move(stack_temp, source);
3548 __ flds(Address(CpuRegister(RSP), temp_offset));
3549 } else {
3550 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3551 codegen_->Move(stack_temp, source);
3552 __ fldl(Address(CpuRegister(RSP), temp_offset));
3553 }
3554 }
3555}
3556
void InstructionCodeGeneratorX86_64::GenerateRemFP(HRem *rem) {
  // Implements floating-point remainder via the x87 fprem instruction, which
  // produces a partial remainder and must be re-executed until the FPU reports
  // that the reduction is complete.
  DataType::Type type = rem->GetResultType();
  bool is_float = type == DataType::Type::kFloat32;
  size_t elem_size = DataType::Size(type);
  LocationSummary* locations = rem->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  Location out = locations->Out();

  // Create stack space for 2 elements.
  // TODO: enhance register allocator to ask for stack temporaries.
  __ subq(CpuRegister(RSP), Immediate(2 * elem_size));

  // Load the values to the FP stack in reverse order, using temporaries if needed.
  // The divisor ends up in ST(1) and the dividend on top in ST(0).
  PushOntoFPStack(second, elem_size, 2 * elem_size, is_float);
  PushOntoFPStack(first, 0, 2 * elem_size, is_float);

  // Loop doing FPREM until we stabilize.
  NearLabel retry;
  __ Bind(&retry);
  __ fprem();

  // Move FP status to AX.
  __ fstsw();

  // And see if the argument reduction is complete. This is signaled by the
  // C2 FPU flag bit set to 0.
  __ andl(CpuRegister(RAX), Immediate(kC2ConditionMask));
  __ j(kNotEqual, &retry);

  // We have settled on the final value. Retrieve it into an XMM register.
  // Store FP top of stack to real stack.
  if (is_float) {
    __ fsts(Address(CpuRegister(RSP), 0));
  } else {
    __ fstl(Address(CpuRegister(RSP), 0));
  }

  // Pop the 2 items from the FP stack.
  __ fucompp();

  // Load the value from the stack into an XMM register.
  DCHECK(out.IsFpuRegister()) << out;
  if (is_float) {
    __ movss(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  } else {
    __ movsd(out.AsFpuRegister<XmmRegister>(), Address(CpuRegister(RSP), 0));
  }

  // And remove the temporary stack space we allocated.
  __ addq(CpuRegister(RSP), Immediate(2 * elem_size));
}
3609
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003610void InstructionCodeGeneratorX86_64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3611 DCHECK(instruction->IsDiv() || instruction->IsRem());
3612
3613 LocationSummary* locations = instruction->GetLocations();
3614 Location second = locations->InAt(1);
3615 DCHECK(second.IsConstant());
3616
3617 CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
3618 CpuRegister input_register = locations->InAt(0).AsRegister<CpuRegister>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003619 int64_t imm = Int64FromConstant(second.GetConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003620
3621 DCHECK(imm == 1 || imm == -1);
3622
3623 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003624 case DataType::Type::kInt32: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003625 if (instruction->IsRem()) {
3626 __ xorl(output_register, output_register);
3627 } else {
3628 __ movl(output_register, input_register);
3629 if (imm == -1) {
3630 __ negl(output_register);
3631 }
3632 }
3633 break;
3634 }
3635
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003636 case DataType::Type::kInt64: {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003637 if (instruction->IsRem()) {
Mark Mendell92e83bf2015-05-07 11:25:03 -04003638 __ xorl(output_register, output_register);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003639 } else {
3640 __ movq(output_register, input_register);
3641 if (imm == -1) {
3642 __ negq(output_register);
3643 }
3644 }
3645 break;
3646 }
3647
3648 default:
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003649 LOG(FATAL) << "Unexpected type for div by (-)1 " << instruction->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003650 }
3651}
void InstructionCodeGeneratorX86_64::RemByPowerOfTwo(HRem* instruction) {
  // Computes numerator % (+/-2^k) without idiv. The low k bits are extracted
  // with a mask; if they are non-zero and the numerator is negative, the result
  // is adjusted downward by 2^k so the remainder carries the numerator's sign
  // (truncated-division semantics). The divisor's sign does not affect the value.
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);
  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    NearLabel done;
    // out = numerator & (2^k - 1); if zero, the remainder is exact.
    __ movl(out, numerator);
    __ andl(out, Immediate(abs_imm-1));
    __ j(Condition::kZero, &done);
    // tmp = out + ~(2^k - 1) == out - 2^k; select it when the numerator is negative.
    __ leal(tmp, Address(out, static_cast<int32_t>(~(abs_imm-1))));
    __ testl(numerator, numerator);
    __ cmov(Condition::kLess, out, tmp, false);
    __ Bind(&done);

  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    // The 64-bit mask may not fit in an immediate, so materialize it in tmp.
    codegen_->Load64BitValue(tmp, abs_imm - 1);
    NearLabel done;

    __ movq(out, numerator);
    __ andq(out, tmp);
    __ j(Condition::kZero, &done);
    // Build the sign adjustment (-2^k when the numerator is negative, else 0)
    // from the replicated sign bit, and fold it into the masked low bits.
    __ movq(tmp, numerator);
    __ sarq(tmp, Immediate(63));
    __ shlq(tmp, Immediate(WhichPowerOf2(abs_imm)));
    __ orq(out, tmp);
    __ Bind(&done);
  }
}
void InstructionCodeGeneratorX86_64::DivByPowerOfTwo(HDiv* instruction) {
  // Computes numerator / (+/-2^k) without idiv, preserving round-toward-zero
  // semantics: negative numerators are biased by (2^k - 1) before the
  // arithmetic right shift, and the result is negated for negative divisors.
  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  CpuRegister output_register = locations->Out().AsRegister<CpuRegister>();
  CpuRegister numerator = locations->InAt(0).AsRegister<CpuRegister>();

  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
  uint64_t abs_imm = AbsOrMin(imm);

  CpuRegister tmp = locations->GetTemp(0).AsRegister<CpuRegister>();

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // When denominator is equal to 2, we can add signed bit and numerator to tmp.
    // Below we are using addl instruction instead of cmov which give us 1 cycle benefit.
    if (abs_imm == 2) {
      __ leal(tmp, Address(numerator, 0));
      __ shrl(tmp, Immediate(31));
      __ addl(tmp, numerator);
    } else {
      // tmp = numerator + (2^k - 1); keep the biased value only for negative numerators.
      __ leal(tmp, Address(numerator, abs_imm - 1));
      __ testl(numerator, numerator);
      __ cmov(kGreaterEqual, tmp, numerator);
    }
    int shift = CTZ(imm);
    __ sarl(tmp, Immediate(shift));

    if (imm < 0) {
      __ negl(tmp);
    }

    __ movl(output_register, tmp);
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister rdx = locations->GetTemp(0).AsRegister<CpuRegister>();
    // Same scheme as the 32-bit path; the bias constant may not fit in an
    // immediate, so it is materialized in rdx first.
    if (abs_imm == 2) {
      __ movq(rdx, numerator);
      __ shrq(rdx, Immediate(63));
      __ addq(rdx, numerator);
    } else {
      codegen_->Load64BitValue(rdx, abs_imm - 1);
      __ addq(rdx, numerator);
      __ testq(numerator, numerator);
      __ cmov(kGreaterEqual, rdx, numerator);
    }
    int shift = CTZ(imm);
    __ sarq(rdx, Immediate(shift));

    if (imm < 0) {
      __ negq(rdx);
    }

    __ movq(output_register, rdx);
  }
}
3742
void InstructionCodeGeneratorX86_64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  // Lowers division/remainder by an arbitrary constant (|imm| >= 2, not a power
  // of two) using the magic-number technique: multiply by a precomputed
  // fixed-point reciprocal, then correct with adds and shifts.
  // CalculateMagicAndShiftForDivRem supplies the constant and shift amount.
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);

  // imul pins its operands to RAX/RDX, so the numerator is preserved in a temp
  // while those registers are clobbered.
  CpuRegister numerator = instruction->IsDiv() ? locations->GetTemp(1).AsRegister<CpuRegister>()
                                               : locations->GetTemp(0).AsRegister<CpuRegister>();
  CpuRegister eax = locations->InAt(0).AsRegister<CpuRegister>();
  CpuRegister edx = instruction->IsDiv() ? locations->GetTemp(0).AsRegister<CpuRegister>()
                                         : locations->Out().AsRegister<CpuRegister>();
  CpuRegister out = locations->Out().AsRegister<CpuRegister>();

  DCHECK_EQ(RAX, eax.AsRegister());
  DCHECK_EQ(RDX, edx.AsRegister());
  if (instruction->IsDiv()) {
    DCHECK_EQ(RAX, out.AsRegister());
  } else {
    DCHECK_EQ(RDX, out.AsRegister());
  }

  int64_t magic;
  int shift;

  // TODO: can these branches be written as one?
  if (instruction->GetResultType() == DataType::Type::kInt32) {
    int imm = second.GetConstant()->AsIntConstant()->GetValue();

    CalculateMagicAndShiftForDivRem(imm, false /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movl(numerator, eax);

    // EDX:EAX = magic * numerator.
    __ movl(eax, Immediate(magic));
    __ imull(numerator);

    // Correct the high half for the signs of imm and magic.
    if (imm > 0 && magic < 0) {
      __ addl(edx, numerator);
    } else if (imm < 0 && magic > 0) {
      __ subl(edx, numerator);
    }

    if (shift != 0) {
      __ sarl(edx, Immediate(shift));
    }

    // EDX += 1 if EDX < 0 (round the quotient toward zero).
    __ movl(eax, edx);
    __ shrl(edx, Immediate(31));
    __ addl(edx, eax);

    if (instruction->IsRem()) {
      // remainder = numerator - quotient * imm.
      __ movl(eax, numerator);
      __ imull(edx, Immediate(imm));
      __ subl(eax, edx);
      __ movl(edx, eax);
    } else {
      __ movl(eax, edx);
    }
  } else {
    int64_t imm = second.GetConstant()->AsLongConstant()->GetValue();

    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);

    CpuRegister rax = eax;
    CpuRegister rdx = edx;

    CalculateMagicAndShiftForDivRem(imm, true /* is_long= */, &magic, &shift);

    // Save the numerator.
    __ movq(numerator, rax);

    // RAX = magic
    codegen_->Load64BitValue(rax, magic);

    // RDX:RAX = magic * numerator
    __ imulq(numerator);

    if (imm > 0 && magic < 0) {
      // RDX += numerator
      __ addq(rdx, numerator);
    } else if (imm < 0 && magic > 0) {
      // RDX -= numerator
      __ subq(rdx, numerator);
    }

    // Shift if needed.
    if (shift != 0) {
      __ sarq(rdx, Immediate(shift));
    }

    // RDX += 1 if RDX < 0
    __ movq(rax, rdx);
    __ shrq(rdx, Immediate(63));
    __ addq(rdx, rax);

    if (instruction->IsRem()) {
      __ movq(rax, numerator);

      // remainder = numerator - quotient * imm; a 64-bit imm needs the constant area.
      if (IsInt<32>(imm)) {
        __ imulq(rdx, Immediate(static_cast<int32_t>(imm)));
      } else {
        __ imulq(rdx, codegen_->LiteralInt64Address(imm));
      }

      __ subq(rax, rdx);
      __ movq(rdx, rax);
    } else {
      __ movq(rax, rdx);
    }
  }
}
3853
void InstructionCodeGeneratorX86_64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  // Common lowering for integral HDiv/HRem. Constant divisors are dispatched to
  // the specialized sequences above; otherwise idiv is emitted, with a slow
  // path guarding the divisor == -1 case, which can raise a hardware exception.
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  bool is_div = instruction->IsDiv();
  LocationSummary* locations = instruction->GetLocations();

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  // idiv requires the dividend in RAX; the quotient comes back in RAX and the
  // remainder in RDX.
  DCHECK_EQ(RAX, locations->InAt(0).AsRegister<CpuRegister>().AsRegister());
  DCHECK_EQ(is_div ? RAX : RDX, out.AsRegister());

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      if (is_div) {
        DivByPowerOfTwo(instruction->AsDiv());
      } else {
        RemByPowerOfTwo(instruction->AsRem());
      }
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) DivRemMinusOneSlowPathX86_64(
            instruction, out.AsRegister(), type, is_div);
    codegen_->AddSlowPath(slow_path);

    CpuRegister second_reg = second.AsRegister<CpuRegister>();
    // 0x80000000(00000000)/-1 triggers an arithmetic exception!
    // Dividing by -1 is actually negation and -0x800000000(00000000) = 0x80000000(00000000)
    // so it's safe to just use negl instead of more complex comparisons.
    if (type == DataType::Type::kInt32) {
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);
    } else {
      __ cmpq(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());
      // rdx:rax <- sign-extended of rax
      __ cqo();
      // rax = quotient, rdx = remainder
      __ idivq(second_reg);
    }
    __ Bind(slow_path->GetExitLabel());
  }
}
3913
// Register allocation for HDiv. Integer division must follow the x86-64
// idiv protocol: the dividend lives in RAX (with its sign extension in RDX),
// the quotient comes back in RAX, and RDX is clobbered.
void LocationsBuilderX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      // The quotient is produced in RAX, which also holds the first input.
      locations->SetOut(Location::SameAsFirstInput());
      // Intel uses edx:eax (rdx:rax for 64-bit) as the dividend, so reserve RDX.
      locations->AddTemp(Location::RegisterLocation(RDX));
      // Division by a constant is strength-reduced using imul with a magic
      // constant. We need to save the numerator while we tweak rax and rdx:
      // as imul enforces results to be in RAX and RDX, things are simpler if
      // we use RDX also as output and request another temp.
      if (div->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      // divss/divsd accept a register, a constant-pool literal, or a stack slot
      // as the divisor, hence Any().
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3946
// Code generation for HDiv. Integer division is shared with HRem through
// GenerateDivRemIntegral; floating-point division selects the divss/divsd
// addressing form matching where the divisor was allocated.
void InstructionCodeGeneratorX86_64::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The output overwrites the first input (SameAsFirstInput constraint).
  DCHECK(first.Equals(locations->Out()));

  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateDivRemIntegral(div);
      break;
    }

    case DataType::Type::kFloat32: {
      if (second.IsFpuRegister()) {
        __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Divisor is a compile-time constant materialized in the literal pool.
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralFloatAddress(
                     second.GetConstant()->AsFloatConstant()->GetValue()));
      } else {
        // Divisor was spilled; divide directly from its stack slot.
        DCHECK(second.IsStackSlot());
        __ divss(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (second.IsFpuRegister()) {
        __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
      } else if (second.IsConstant()) {
        // Divisor is a compile-time constant materialized in the literal pool.
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 codegen_->LiteralDoubleAddress(
                     second.GetConstant()->AsDoubleConstant()->GetValue()));
      } else {
        // Divisor was spilled; divide directly from its stack slot.
        DCHECK(second.IsDoubleStackSlot());
        __ divsd(first.AsFpuRegister<XmmRegister>(),
                 Address(CpuRegister(RSP), second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3995
// Register allocation for HRem. Integer remainder follows the same idiv
// protocol as HDiv, except the result of interest is the remainder in RDX.
void LocationsBuilderX86_64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(rem, LocationSummary::kNoCall);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RegisterLocation(RAX));
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      // Intel uses rdx:rax as the dividend and puts the remainder in rdx
      locations->SetOut(Location::RegisterLocation(RDX));
      // Remainder by a constant is strength-reduced using imul with a magic
      // constant. We need to save the numerator while we tweak eax and edx:
      // as imul enforces results to be in RAX and RDX, things are simpler if
      // we use EAX also as output and request another temp.
      if (rem->InputAt(1)->IsConstant()) {
        locations->AddTemp(Location::RequiresRegister());
      }
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // FP remainder (GenerateRemFP) can consume its operands from any
      // location, but needs RAX as a scratch register.
      locations->SetInAt(0, Location::Any());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::RequiresFpuRegister());
      locations->AddTemp(Location::RegisterLocation(RAX));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
4030
4031void InstructionCodeGeneratorX86_64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004032 DataType::Type type = rem->GetResultType();
Calin Juravlebacfec32014-11-14 15:54:36 +00004033 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004034 case DataType::Type::kInt32:
4035 case DataType::Type::kInt64: {
Calin Juravlebacfec32014-11-14 15:54:36 +00004036 GenerateDivRemIntegral(rem);
4037 break;
4038 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004039 case DataType::Type::kFloat32:
4040 case DataType::Type::kFloat64: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05004041 GenerateRemFP(rem);
Calin Juravled2ec87d2014-12-08 14:24:46 +00004042 break;
4043 }
Calin Juravlebacfec32014-11-14 15:54:36 +00004044 default:
4045 LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
4046 }
4047}
4048
Aart Bik1f8d51b2018-02-15 10:42:37 -08004049static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
4050 LocationSummary* locations = new (allocator) LocationSummary(minmax);
4051 switch (minmax->GetResultType()) {
4052 case DataType::Type::kInt32:
4053 case DataType::Type::kInt64:
4054 locations->SetInAt(0, Location::RequiresRegister());
4055 locations->SetInAt(1, Location::RequiresRegister());
4056 locations->SetOut(Location::SameAsFirstInput());
4057 break;
4058 case DataType::Type::kFloat32:
4059 case DataType::Type::kFloat64:
4060 locations->SetInAt(0, Location::RequiresFpuRegister());
4061 locations->SetInAt(1, Location::RequiresFpuRegister());
4062 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
4063 // the second input to be the output (we can simply swap inputs).
4064 locations->SetOut(Location::SameAsFirstInput());
4065 break;
4066 default:
4067 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
4068 }
4069}
4070
// Emits a branchless integer min/max: compare the two operands and
// conditionally move the second over the first when it wins. The output
// register aliases the first input (SameAsFirstInput constraint).
void InstructionCodeGeneratorX86_64::GenerateMinMaxInt(LocationSummary* locations,
                                                       bool is_min,
                                                       DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  CpuRegister out = locations->Out().AsRegister<CpuRegister>();
  CpuRegister op2 = op2_loc.AsRegister<CpuRegister>();

  // (out := op1)
  // out <=? op2
  // if out is min jmp done
  // out := op2
  // done:

  // For min we replace out when it is strictly greater than op2; for max,
  // when it is strictly less. cmov avoids a branch either way.
  if (type == DataType::Type::kInt64) {
    __ cmpq(out, op2);
    __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ true);
  } else {
    DCHECK_EQ(type, DataType::Type::kInt32);
    __ cmpl(out, op2);
    __ cmov(is_min ? Condition::kGreater : Condition::kLess, out, op2, /*is64bit*/ false);
  }
}
4104
// Emits floating-point min/max with Java semantics: NaN propagates (as the
// canonical quiet NaN), and +0.0/-0.0 are distinguished (min picks -0.0,
// max picks +0.0). ucomiss/ucomisd sets PF on an unordered (NaN) compare,
// which is what the kParityEven branch tests.
void InstructionCodeGeneratorX86_64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if Nan jmp Nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // Nan_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick. Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (type == DataType::Type::kFloat64) {
    __ ucomisd(out, op2);
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ ucomiss(out, op2);
  }

  // PF set => at least one operand was NaN.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0. We only get here when the operands compared equal;
  // OR-ing the bit patterns makes min(+0,-0) == -0 (sign bit sticks),
  // AND-ing makes max(+0,-0) == +0. For equal non-zero values this is a no-op.
  if (is_min) {
    if (type == DataType::Type::kFloat64) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (type == DataType::Type::kFloat64) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling: load the canonical quiet NaN bit pattern.
  __ Bind(&nan);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, codegen_->LiteralInt64Address(INT64_C(0x7FF8000000000000)));
  } else {
    __ movss(out, codegen_->LiteralInt32Address(INT32_C(0x7FC00000)));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (type == DataType::Type::kFloat64) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
4187
Aart Bik351df3e2018-03-07 11:54:57 -08004188void InstructionCodeGeneratorX86_64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
4189 DataType::Type type = minmax->GetResultType();
4190 switch (type) {
4191 case DataType::Type::kInt32:
4192 case DataType::Type::kInt64:
4193 GenerateMinMaxInt(minmax->GetLocations(), is_min, type);
4194 break;
4195 case DataType::Type::kFloat32:
4196 case DataType::Type::kFloat64:
4197 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
4198 break;
4199 default:
4200 LOG(FATAL) << "Unexpected type for HMinMax " << type;
4201 }
4202}
4203
// HMin uses the same register constraints as HMax.
void LocationsBuilderX86_64::VisitMin(HMin* min) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}
4207
// HMin shares its code generation with HMax (is_min selects the direction).
void InstructionCodeGeneratorX86_64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}
4211
// HMax uses the same register constraints as HMin.
void LocationsBuilderX86_64::VisitMax(HMax* max) {
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}
4215
// HMax shares its code generation with HMin (is_min selects the direction).
void InstructionCodeGeneratorX86_64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
4219
Aart Bik3dad3412018-02-28 12:01:46 -08004220void LocationsBuilderX86_64::VisitAbs(HAbs* abs) {
4221 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
4222 switch (abs->GetResultType()) {
4223 case DataType::Type::kInt32:
4224 case DataType::Type::kInt64:
4225 locations->SetInAt(0, Location::RequiresRegister());
4226 locations->SetOut(Location::SameAsFirstInput());
4227 locations->AddTemp(Location::RequiresRegister());
4228 break;
4229 case DataType::Type::kFloat32:
4230 case DataType::Type::kFloat64:
4231 locations->SetInAt(0, Location::RequiresFpuRegister());
4232 locations->SetOut(Location::SameAsFirstInput());
4233 locations->AddTemp(Location::RequiresFpuRegister());
4234 break;
4235 default:
4236 LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
4237 }
4238}
4239
// Code generation for HAbs. Integers use the branchless sign-mask trick:
// mask = x >> 31 (all ones if negative, zero otherwise), then
// (x + mask) ^ mask, which negates x exactly when it was negative.
// Floating-point values just clear the sign bit with an AND mask.
void InstructionCodeGeneratorX86_64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      // out holds the input (SameAsFirstInput constraint).
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: arithmetic shift replicates the sign bit.
      __ movl(mask, out);
      __ sarl(mask, Immediate(31));
      // Add mask, then xor: identity for x >= 0, two's-complement negation for x < 0.
      __ addl(out, mask);
      __ xorl(out, mask);
      break;
    }
    case DataType::Type::kInt64: {
      CpuRegister out = locations->Out().AsRegister<CpuRegister>();
      CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>();
      // Create mask: arithmetic shift replicates the sign bit.
      __ movq(mask, out);
      __ sarq(mask, Immediate(63));
      // Add mask, then xor (see 32-bit case above).
      __ addq(out, mask);
      __ xorq(out, mask);
      break;
    }
    case DataType::Type::kFloat32: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit: all bits set except bit 31.
      __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x7FFFFFFF)));
      __ andps(out, mask);
      break;
    }
    case DataType::Type::kFloat64: {
      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      // Clear the sign bit: all bits set except bit 63.
      __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF)));
      __ andpd(out, mask);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HAbs " << abs->GetResultType();
  }
}
4283
// Register allocation for HDivZeroCheck. The divisor can be tested in a
// register, on the stack, or folded away entirely when it is a constant.
void LocationsBuilderX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::Any());
}
4288
// Code generation for HDivZeroCheck: branch to a throwing slow path when the
// divisor is zero. Sub-int types are handled with the 32-bit compare.
void InstructionCodeGeneratorX86_64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32: {
      if (value.IsRegister()) {
        // test reg,reg sets ZF iff the register is zero.
        __ testl(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsStackSlot()) {
        // Divisor was spilled; compare its stack slot against zero.
        __ cmpl(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        // Constant divisor: either always throws or never does; no check
        // code is emitted for a non-zero constant.
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (value.IsRegister()) {
        __ testq(value.AsRegister<CpuRegister>(), value.AsRegister<CpuRegister>());
        __ j(kEqual, slow_path->GetEntryLabel());
      } else if (value.IsDoubleStackSlot()) {
        __ cmpq(Address(CpuRegister(RSP), value.GetStackIndex()), Immediate(0));
        __ j(kEqual, slow_path->GetEntryLabel());
      } else {
        DCHECK(value.IsConstant()) << value;
        // See the 32-bit constant case above.
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ jmp(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
  }
}
4337
// Register allocation shared by HShl/HShr/HUShr. x86 variable shifts take
// their count in CL, so a non-constant shift count is pinned to RCX.
void LocationsBuilderX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(op, LocationSummary::kNoCall);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL.
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, op->InputAt(1)));
      // The shift operates in place on the first input.
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}
4357
// Code generation shared by HShl (shl), HShr (sar) and HUShr (shr).
// Constant shift counts are masked to the maximum distance (31 for int,
// 63 for long), matching both x86 hardware and Java shift semantics.
void InstructionCodeGeneratorX86_64::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (op->GetResultType()) {
    case DataType::Type::kInt32: {
      if (second.IsRegister()) {
        // Variable count: already allocated to CL by HandleShift's locations.
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case DataType::Type::kInt64: {
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        if (op->IsShl()) {
          __ shlq(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarq(first_reg, second_reg);
        } else {
          __ shrq(first_reg, second_reg);
        }
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        if (op->IsShl()) {
          __ shlq(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarq(first_reg, imm);
        } else {
          __ shrq(first_reg, imm);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
      UNREACHABLE();
  }
}
4415
// Register allocation for HRor (rotate right). Mirrors HandleShift: a
// variable rotate count must be in CL, a constant count is encoded inline.
void LocationsBuilderX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(ror, LocationSummary::kNoCall);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      // The shift count needs to be in CL (unless it is a constant).
      locations->SetInAt(1, Location::ByteRegisterOrConstant(RCX, ror->InputAt(1)));
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4434
// Code generation for HRor: ror with either a CL-resident count or an
// immediate masked to the legal rotate distance (31 for int, 63 for long).
void InstructionCodeGeneratorX86_64::VisitRor(HRor* ror) {
  LocationSummary* locations = ror->GetLocations();
  CpuRegister first_reg = locations->InAt(0).AsRegister<CpuRegister>();
  Location second = locations->InAt(1);

  switch (ror->GetResultType()) {
    case DataType::Type::kInt32:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorl(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
        __ rorl(first_reg, imm);
      }
      break;
    case DataType::Type::kInt64:
      if (second.IsRegister()) {
        CpuRegister second_reg = second.AsRegister<CpuRegister>();
        __ rorq(first_reg, second_reg);
      } else {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance);
        __ rorq(first_reg, imm);
      }
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
      UNREACHABLE();
  }
}
4464
// HShl shares its register constraints with the other shifts.
void LocationsBuilderX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4468
// HShl shares its code generation with the other shifts.
void InstructionCodeGeneratorX86_64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4472
// HShr (arithmetic right shift) shares its register constraints with the other shifts.
void LocationsBuilderX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4476
// HShr shares its code generation with the other shifts.
void InstructionCodeGeneratorX86_64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4480
// HUShr (logical right shift) shares its register constraints with the other shifts.
void LocationsBuilderX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4484
// HUShr shares its code generation with the other shifts.
void InstructionCodeGeneratorX86_64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
4488
// Register allocation for HNewInstance: a runtime call, so the class input
// goes in the first runtime-call argument register and the new object comes
// back in RAX (the x86-64 return register).
void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(RAX));
}
4496
// Code generation for HNewInstance: delegate allocation to the runtime
// entrypoint chosen at graph-building time.
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  DCHECK(!codegen_->IsLeafMethod());
}
4502
// Register allocation for HNewArray: a runtime call taking the class and the
// length in the first two runtime-call argument registers; result in RAX.
void LocationsBuilderX86_64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(Location::RegisterLocation(RAX));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
4511
// Code generation for HNewArray: pick the allocation entrypoint for this
// instruction and call into the runtime.
void InstructionCodeGeneratorX86_64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care of poisoning the reference.
  QuickEntrypointEnum entrypoint = CodeGenerator::GetArrayAllocationEntrypoint(instruction);
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
4519
// Register allocation for HParameterValue. Stack-passed parameters live in
// the caller's frame, so their slot index (computed relative to the incoming
// SP) is rebased by this method's frame size.
void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
4531
// No code is emitted for HParameterValue.
void InstructionCodeGeneratorX86_64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
4536
// Register allocation for HCurrentMethod: the ArtMethod* is pinned to the
// dedicated method register for this architecture.
void LocationsBuilderX86_64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
4542
// No code is emitted for HCurrentMethod.
void InstructionCodeGeneratorX86_64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
4547
// Register allocation for HClassTableGet: a class in, a method pointer out.
void LocationsBuilderX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
4554
// Code generation for HClassTableGet: load an ArtMethod* out of a class.
// vtable entries are embedded in the class object, so a single load at the
// entry's offset suffices; IMT entries require first loading the IMT pointer
// from the class, then indexing into that table.
void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Direct load from the embedded vtable slot.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kX86_64PointerSize).SizeValue();
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kX86_64PointerSize));
    // Load the IMT pointer from the class...
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->InAt(0).AsRegister<CpuRegister>(),
                    mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value()));
    // ...then the method entry from the IMT.
    __ movq(locations->Out().AsRegister<CpuRegister>(),
            Address(locations->Out().AsRegister<CpuRegister>(), method_offset));
  }
}
4572
// Register allocation for HNot: bitwise not operates in place on its input.
void LocationsBuilderX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}
4579
// Code generation for HNot: a single not instruction on the in/out register.
void InstructionCodeGeneratorX86_64::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  // Input and output must share a register (SameAsFirstInput constraint).
  DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
            locations->Out().AsRegister<CpuRegister>().AsRegister());
  Location out = locations->Out();
  switch (not_->GetResultType()) {
    case DataType::Type::kInt32:
      __ notl(out.AsRegister<CpuRegister>());
      break;

    case DataType::Type::kInt64:
      __ notq(out.AsRegister<CpuRegister>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
4598
David Brazdil66d126e2015-04-03 16:02:44 +01004599void LocationsBuilderX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
4600 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004601 new (GetGraph()->GetAllocator()) LocationSummary(bool_not, LocationSummary::kNoCall);
David Brazdil66d126e2015-04-03 16:02:44 +01004602 locations->SetInAt(0, Location::RequiresRegister());
4603 locations->SetOut(Location::SameAsFirstInput());
4604}
4605
4606void InstructionCodeGeneratorX86_64::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004607 LocationSummary* locations = bool_not->GetLocations();
4608 DCHECK_EQ(locations->InAt(0).AsRegister<CpuRegister>().AsRegister(),
4609 locations->Out().AsRegister<CpuRegister>().AsRegister());
4610 Location out = locations->Out();
4611 __ xorl(out.AsRegister<CpuRegister>(), Immediate(1));
4612}
4613
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004614void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004615 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004616 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004617 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004618 locations->SetInAt(i, Location::Any());
4619 }
4620 locations->SetOut(Location::Any());
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01004621}
4622
// Phis never reach code generation in this backend; presumably they are
// resolved into moves earlier in the pipeline — TODO(review): confirm
// where phi resolution happens for x86-64.
void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unimplemented";
}
4626
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004627void CodeGeneratorX86_64::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004628 /*
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004629 * According to the JSR-133 Cookbook, for x86-64 only StoreLoad/AnyAny barriers need memory fence.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00004630 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86-64 memory model.
Calin Juravle52c48962014-12-16 17:02:57 +00004631 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4632 */
4633 switch (kind) {
4634 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004635 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004636 break;
4637 }
4638 case MemBarrierKind::kAnyStore:
4639 case MemBarrierKind::kLoadAny:
4640 case MemBarrierKind::kStoreStore: {
4641 // nop
4642 break;
4643 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004644 case MemBarrierKind::kNTStoreStore:
4645 // Non-Temporal Store/Store needs an explicit fence.
Andreas Gampe3db70682018-12-26 15:12:03 -08004646 MemoryFence(/* non-temporal= */ true);
Mark Mendell7aa04a12016-01-27 22:39:07 -05004647 break;
Calin Juravle52c48962014-12-16 17:02:57 +00004648 }
4649}
4650
4651void LocationsBuilderX86_64::HandleFieldGet(HInstruction* instruction) {
4652 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
4653
Roland Levillain0d5a2812015-11-13 10:07:31 +00004654 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004655 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004656 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004657 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
4658 object_field_get_with_read_barrier
4659 ? LocationSummary::kCallOnSlowPath
4660 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004661 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004662 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004663 }
Calin Juravle52c48962014-12-16 17:02:57 +00004664 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004665 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004666 locations->SetOut(Location::RequiresFpuRegister());
4667 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004668 // The output overlaps for an object field get when read barriers
4669 // are enabled: we do not want the move to overwrite the object's
4670 // location, as we need it to emit the read barrier.
4671 locations->SetOut(
4672 Location::RequiresRegister(),
4673 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004674 }
Calin Juravle52c48962014-12-16 17:02:57 +00004675}
4676
// Emits code for an instance or static field load. Input 0 holds the base
// object; the loaded value goes to the output (FP register for float/double).
// For reference loads, read barriers are emitted here; volatile loads are
// followed by a LoadAny barrier. Implicit null checks are recorded against
// the load instruction itself, so the load must be the first emitted
// instruction that touches the object.
void InstructionCodeGeneratorX86_64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  CpuRegister base = base_loc.AsRegister<CpuRegister>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  // The instruction's type may be an unsigned/signed variant of the declared
  // field type, but the sizes must agree.
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  // Pick the load instruction by type: zero-extending vs. sign-extending
  // for sub-word types, plain moves for word-sized and FP types.
  switch (load_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      __ movzxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt8: {
      __ movsxb(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint16: {
      __ movzxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt16: {
      __ movsxw(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kInt32: {
      __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kReference: {
      // /* HeapReference<Object> */ out = *(base + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            instruction, out, base, offset, /* needs_null_check= */ true);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ movl(out.AsRegister<CpuRegister>(), Address(base, offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (is_volatile) {
          codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
      }
      break;
    }

    case DataType::Type::kInt64: {
      __ movq(out.AsRegister<CpuRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat32: {
      __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kFloat64: {
      __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << load_type;
      UNREACHABLE();
  }

  if (load_type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // fields, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (is_volatile) {
    if (load_type == DataType::Type::kReference) {
      // Memory barriers, in the case of references, are also handled
      // in the previous switch statement.
    } else {
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
    }
  }
}
4779
4780void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
4781 const FieldInfo& field_info) {
4782 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4783
4784 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004785 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004786 DataType::Type field_type = field_info.GetFieldType();
Mark Mendellea5af682015-10-22 17:35:49 -04004787 bool is_volatile = field_info.IsVolatile();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004788 bool needs_write_barrier =
Roland Levillain4d027112015-07-01 15:41:14 +01004789 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004790
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004791 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004792 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Mark Mendellea5af682015-10-22 17:35:49 -04004793 if (is_volatile) {
4794 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4795 locations->SetInAt(1, Location::FpuRegisterOrInt32Constant(instruction->InputAt(1)));
4796 } else {
4797 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4798 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004799 } else {
Mark Mendellea5af682015-10-22 17:35:49 -04004800 if (is_volatile) {
4801 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4802 locations->SetInAt(1, Location::RegisterOrInt32Constant(instruction->InputAt(1)));
4803 } else {
4804 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4805 }
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004806 }
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00004807 if (needs_write_barrier) {
Nicolas Geoffray9ae0daa2014-09-30 22:40:23 +01004808 // Temporary registers for the write barrier.
Roland Levillain4d027112015-07-01 15:41:14 +01004809 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004810 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004811 } else if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01004812 // Temporary register for the reference poisoning.
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004813 locations->AddTemp(Location::RequiresRegister());
4814 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004815}
4816
// Emits code for an instance or static field store. Input 0 holds the base
// object, input 1 the value (register or constant). Volatile stores are
// bracketed by AnyStore (before) and AnyAny (after) barriers. Reference
// stores mark the GC card and, with heap poisoning enabled, poison the
// stored reference via a temp register. Implicit null checks are recorded
// right after the store unless MoveInt64ToAddress already did so.
void InstructionCodeGeneratorX86_64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  CpuRegister base = locations->InAt(0).AsRegister<CpuRegister>();
  Location value = locations->InAt(1);
  bool is_volatile = field_info.IsVolatile();
  DataType::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  // Set to true when the 64-bit constant path has already recorded the
  // implicit null check (it may emit two 32-bit stores).
  bool maybe_record_implicit_null_check_done = false;

  switch (field_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      if (value.IsConstant()) {
        __ movb(Address(base, offset),
                Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      } else {
        __ movb(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      if (value.IsConstant()) {
        __ movw(Address(base, offset),
                Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      } else {
        __ movw(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kInt32:
    case DataType::Type::kReference: {
      if (value.IsConstant()) {
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        // `field_type == DataType::Type::kReference` implies `v == 0`.
        DCHECK((field_type != DataType::Type::kReference) || (v == 0));
        // Note: if heap poisoning is enabled, no need to poison
        // (negate) `v` if it is a reference, as it would be null.
        __ movl(Address(base, offset), Immediate(v));
      } else {
        if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
          // Poison a copy in a temp so the original value register is preserved.
          CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
          __ movl(temp, value.AsRegister<CpuRegister>());
          __ PoisonHeapReference(temp);
          __ movl(Address(base, offset), temp);
        } else {
          __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      if (value.IsConstant()) {
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movq(Address(base, offset), value.AsRegister<CpuRegister>());
      }
      break;
    }

    case DataType::Type::kFloat32: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the float constant as an immediate.
        int32_t v =
            bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(Address(base, offset), Immediate(v));
      } else {
        __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kFloat64: {
      if (value.IsConstant()) {
        // Store the raw bit pattern of the double constant.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        codegen_->MoveInt64ToAddress(Address(base, offset),
                                     Address(base, offset + sizeof(int32_t)),
                                     v,
                                     instruction);
        maybe_record_implicit_null_check_done = true;
      } else {
        __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (!maybe_record_implicit_null_check_done) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
    CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
    codegen_->MarkGCCard(temp, card, base, value.AsRegister<CpuRegister>(), value_can_be_null);
  }

  if (is_volatile) {
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4942
// Instance field stores share the common field-set location logic.
void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4946
// Instance field stores share the common field-set code generation.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4950
// Instance field loads share the common field-get location logic.
void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction);
}
4954
// Instance field loads share the common field-get code generation.
void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004958
// Static field loads share the common field-get location logic.
void LocationsBuilderX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004962
// Static field loads share the common field-get code generation.
void InstructionCodeGeneratorX86_64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004966
// Static field stores share the common field-set location logic.
void LocationsBuilderX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004970
// Static field stores share the common field-set code generation.
void InstructionCodeGeneratorX86_64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4974
// StringBuilder.append is implemented as a runtime call returning in RAX.
void LocationsBuilderX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  codegen_->CreateStringBuilderAppendLocations(instruction, Location::RegisterLocation(RAX));
}
4978
// Loads the append format descriptor into RDI, then calls the
// StringBuilderAppend runtime entrypoint.
void InstructionCodeGeneratorX86_64::VisitStringBuilderAppend(HStringBuilderAppend* instruction) {
  __ movl(CpuRegister(RDI), Immediate(instruction->GetFormat()->GetValue()));
  codegen_->InvokeRuntime(kQuickStringBuilderAppend, instruction, instruction->GetDexPc());
}
4983
// Unresolved field accesses go through a runtime call; set up locations
// according to the field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4990
// Generates the runtime call for an unresolved instance field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5000
// Unresolved field accesses go through a runtime call; set up locations
// according to the field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5007
// Generates the runtime call for an unresolved instance field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5017
// Unresolved field accesses go through a runtime call; set up locations
// according to the field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5024
// Generates the runtime call for an unresolved static field load.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5034
// Unresolved field accesses go through a runtime call; set up locations
// according to the field-access calling convention.
void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5041
// Generates the runtime call for an unresolved static field store.
void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionX86_64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5051
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005052void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005053 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5054 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5055 ? Location::RequiresRegister()
5056 : Location::Any();
5057 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005058}
5059
Calin Juravle2ae48182016-03-16 14:05:09 +00005060void CodeGeneratorX86_64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5061 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005062 return;
5063 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005064 LocationSummary* locations = instruction->GetLocations();
5065 Location obj = locations->InAt(0);
5066
5067 __ testl(CpuRegister(RAX), Address(obj.AsRegister<CpuRegister>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005068 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005069}
5070
// Compares the object against null and branches to a slow path when it is
// null. The input may be in a register, on the stack, or a (null) constant.
void CodeGeneratorX86_64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetScopedAllocator()) NullCheckSlowPathX86_64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    // test reg, reg sets ZF when the reference is null.
    __ testl(obj.AsRegister<CpuRegister>(), obj.AsRegister<CpuRegister>());
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant reference here can only be the null constant: always throw.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK(obj.GetConstant()->IsNullConstant());
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
5090
// Delegates to the code generator's shared null-check emission (which picks
// the implicit or explicit variant).
void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
5094
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005095void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005096 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005097 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005098 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005099 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
5100 object_array_get_with_read_barrier
5101 ? LocationSummary::kCallOnSlowPath
5102 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005103 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005104 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005105 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005106 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04005107 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005108 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005109 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5110 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005111 // The output overlaps for an object array get when read barriers
5112 // are enabled: we do not want the move to overwrite the array's
5113 // location, as we need it to emit the read barrier.
5114 locations->SetOut(
5115 Location::RequiresRegister(),
5116 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005117 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005118}
5119
// Emits the load of an array element into the instruction's output location.
// The load instruction is chosen by the element type (zero- vs sign-extending
// byte/word loads, 32/64-bit integer loads, SSE loads for FP). Two special
// cases: compressed strings (kUint16 CharAt) branch on the compression flag,
// and reference loads may go through a read barrier.
void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location out_loc = locations->Out();
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  DataType::Type type = instruction->GetType();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      // Unsigned byte: zero-extend.
      __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kInt8: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      // Signed byte: sign-extend.
      __ movsxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
      break;
    }

    case DataType::Type::kUint16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // Branch cases into compressed and uncompressed for each index's type.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        NearLabel done, not_compressed;
        // The compression flag lives in the low bit of the String's count field.
        __ testb(Address(obj, count_offset), Immediate(1));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ j(kNotZero, &not_compressed);
        // Compressed string: one byte per character.
        __ movzxb(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_1, data_offset));
        __ jmp(&done);
        __ Bind(&not_compressed);
        // Uncompressed string: two bytes per character.
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
        __ Bind(&done);
      } else {
        __ movzxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      }
      break;
    }

    case DataType::Type::kInt16: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movsxw(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_2, data_offset));
      break;
    }

    case DataType::Type::kInt32: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(
            instruction, out_loc, obj, data_offset, index, /* needs_null_check= */ true);
      } else {
        CpuRegister out = out_loc.AsRegister<CpuRegister>();
        __ movl(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        if (index.IsConstant()) {
          // TIMES_4 is the log2 scale factor, hence the shift to fold the
          // constant index into a plain byte offset.
          uint32_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          codegen_->MaybeGenerateReadBarrierSlow(
              instruction, out_loc, out_loc, obj_loc, data_offset, index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      CpuRegister out = out_loc.AsRegister<CpuRegister>();
      __ movq(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kFloat32: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movss(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset));
      break;
    }

    case DataType::Type::kFloat64: {
      XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
      __ movsd(out, CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_8, data_offset));
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (type == DataType::Type::kReference) {
    // Potential implicit null checks, in the case of reference
    // arrays, are handled in the previous switch statement.
  } else {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
5239
5240void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005241 DataType::Type value_type = instruction->GetComponentType();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005242
5243 bool needs_write_barrier =
5244 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005245 bool needs_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005246
Vladimir Markoca6fff82017-10-03 14:49:14 +01005247 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005248 instruction,
Vladimir Marko8fa839c2019-05-16 12:50:47 +00005249 needs_type_check ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall);
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005250
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005251 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellea5af682015-10-22 17:35:49 -04005252 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005253 if (DataType::IsFloatingPointType(value_type)) {
Mark Mendellea5af682015-10-22 17:35:49 -04005254 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005255 } else {
5256 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5257 }
5258
5259 if (needs_write_barrier) {
5260 // Temporary registers for the write barrier.
Roland Levillain16d9f942016-08-25 17:27:56 +01005261 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005262 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005263 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005264}
5265
// Emits the store of a value into an array element. Integral and FP stores
// are a single (possibly split, for 64-bit immediates) move. Reference stores
// additionally emit: an optional null-value fast path, an optional inline
// type check with an ArraySet slow path, the card-marking write barrier, and
// reference poisoning when enabled.
void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location array_loc = locations->InAt(0);
  CpuRegister array = array_loc.AsRegister<CpuRegister>();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool needs_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_1, offset);
      if (value.IsRegister()) {
        __ movb(address, value.AsRegister<CpuRegister>());
      } else {
        __ movb(address, Immediate(CodeGenerator::GetInt8ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_2, offset);
      if (value.IsRegister()) {
        __ movw(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        __ movw(address, Immediate(CodeGenerator::GetInt16ValueOf(value.GetConstant())));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kReference: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);

      if (!value.IsRegister()) {
        // Just setting null: no type check or write barrier needed.
        DCHECK(instruction->InputAt(2)->IsNullConstant());
        DCHECK(value.IsConstant()) << value;
        __ movl(address, Immediate(0));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        DCHECK(!needs_write_barrier);
        DCHECK(!needs_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      CpuRegister register_value = value.AsRegister<CpuRegister>();
      Location temp_loc = locations->GetTemp(0);
      CpuRegister temp = temp_loc.AsRegister<CpuRegister>();

      // A null value skips the type check and the card marking entirely.
      bool can_value_be_null = instruction->GetValueCanBeNull();
      NearLabel do_store;
      if (can_value_be_null) {
        __ testl(register_value, register_value);
        __ j(kEqual, &do_store);
      }

      SlowPathCode* slow_path = nullptr;
      if (needs_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathX86_64(instruction);
        codegen_->AddSlowPath(slow_path);

        const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
        const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
        const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        // /* HeapReference<Class> */ temp = array->klass_
        __ movl(temp, Address(array, class_offset));
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        __ MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ movl(temp, Address(temp, component_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor the object reference in `register_value->klass`, as
        // we are comparing two poisoned references.
        __ cmpl(temp, Address(register_value, class_offset));

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] arrays, also accept any value whose class directly
          // extends Object; anything else is resolved on the slow path.
          NearLabel do_put;
          __ j(kEqual, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp);

          // If heap poisoning is enabled, no need to unpoison the
          // heap reference loaded below, as it is only used for a
          // comparison with null.
          __ cmpl(Address(temp, super_offset), Immediate(0));
          __ j(kNotEqual, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ j(kNotEqual, slow_path->GetEntryLabel());
        }
      }

      // Mark the card unconditionally here: the value is known non-null on
      // this path (null values branched to `do_store` above).
      CpuRegister card = locations->GetTemp(1).AsRegister<CpuRegister>();
      codegen_->MarkGCCard(
          temp, card, array, value.AsRegister<CpuRegister>(), /* value_can_be_null= */ false);

      if (can_value_be_null) {
        DCHECK(do_store.IsLinked());
        __ Bind(&do_store);
      }

      Location source = value;
      if (kPoisonHeapReferences) {
        // Poison a copy in `temp` so `register_value` stays usable.
        __ movl(temp, register_value);
        __ PoisonHeapReference(temp);
        source = temp_loc;
      }

      __ movl(address, source.AsRegister<CpuRegister>());

      if (can_value_be_null || !needs_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }

      break;
    }

    case DataType::Type::kInt32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsRegister()) {
        __ movl(address, value.AsRegister<CpuRegister>());
      } else {
        DCHECK(value.IsConstant()) << value;
        int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsRegister()) {
        __ movq(address, value.AsRegister<CpuRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // A 64-bit immediate may need to be stored as two 32-bit halves;
        // MoveInt64ToAddress handles that (and the null-check recording).
        int64_t v = value.GetConstant()->AsLongConstant()->GetValue();
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_4, offset);
      if (value.IsFpuRegister()) {
        __ movss(address, value.AsFpuRegister<XmmRegister>());
      } else {
        // Store the constant's bit pattern as a 32-bit integer immediate.
        DCHECK(value.IsConstant());
        int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
        __ movl(address, Immediate(v));
      }
      codegen_->MaybeRecordImplicitNullCheck(instruction);
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Address address = CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset);
      if (value.IsFpuRegister()) {
        __ movsd(address, value.AsFpuRegister<XmmRegister>());
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      } else {
        // Store the constant's bit pattern, possibly as two 32-bit halves.
        int64_t v =
            bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
        Address address_high =
            CodeGeneratorX86_64::ArrayAddress(array, index, TIMES_8, offset + sizeof(int32_t));
        codegen_->MoveInt64ToAddress(address, address_high, v, instruction);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
5475
5476void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005477 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005478 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005479 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005480 if (!instruction->IsEmittedAtUseSite()) {
5481 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5482 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005483}
5484
5485void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005486 if (instruction->IsEmittedAtUseSite()) {
5487 return;
5488 }
5489
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005490 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005491 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005492 CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
5493 CpuRegister out = locations->Out().AsRegister<CpuRegister>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005494 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005495 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005496 // Mask out most significant bit in case the array is String's array of char.
5497 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005498 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005499 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005500}
5501
5502void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005503 RegisterSet caller_saves = RegisterSet::Empty();
5504 InvokeRuntimeCallingConvention calling_convention;
5505 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5506 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5507 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005508 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005509 HInstruction* length = instruction->InputAt(1);
5510 if (!length->IsEmittedAtUseSite()) {
5511 locations->SetInAt(1, Location::RegisterOrConstant(length));
5512 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005513}
5514
// Emits an array bounds check: jumps to a throwing slow path unless
// 0 <= index < length. Specializes on whether index/length are constants,
// and whether the length is read from a register or directly from the
// array's length field in memory (including the compressed-string case).
void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);
  SlowPathCode* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathX86_64(instruction);

  if (length_loc.IsConstant()) {
    int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
    if (index_loc.IsConstant()) {
      // BCE will remove the bounds check if we are guaranteed to pass.
      int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
      if (index < 0 || index >= length) {
        // Statically known to fail: jump straight to the slow path.
        codegen_->AddSlowPath(slow_path);
        __ jmp(slow_path->GetEntryLabel());
      } else {
        // Some optimization after BCE may have generated this, and we should not
        // generate a bounds check if it is a valid range.
      }
      return;
    }

    // We have to reverse the jump condition because the length is the constant.
    // Unsigned kAboveEqual also catches negative indices.
    CpuRegister index_reg = index_loc.AsRegister<CpuRegister>();
    __ cmpl(index_reg, Immediate(length));
    codegen_->AddSlowPath(slow_path);
    __ j(kAboveEqual, slow_path->GetEntryLabel());
  } else {
    HInstruction* array_length = instruction->InputAt(1);
    if (array_length->IsEmittedAtUseSite()) {
      // Address the length field in the array.
      DCHECK(array_length->IsArrayLength());
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<CpuRegister>(), len_offset);
      if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
        // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
        // the string compression flag) with the in-memory length and avoid the temporary.
        // The in-memory count carries the compression flag in its low bit, so
        // load it into TMP and shift the flag out before comparing.
        CpuRegister length_reg = CpuRegister(TMP);
        __ movl(length_reg, array_len);
        codegen_->MaybeRecordImplicitNullCheck(array_length);
        __ shrl(length_reg, Immediate(1));
        codegen_->GenerateIntCompare(length_reg, index_loc);
      } else {
        // Checking the bound for general case:
        // Array of char or String's array when the compression feature off.
        if (index_loc.IsConstant()) {
          int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
          __ cmpl(array_len, Immediate(value));
        } else {
          __ cmpl(array_len, index_loc.AsRegister<CpuRegister>());
        }
        codegen_->MaybeRecordImplicitNullCheck(array_length);
      }
    } else {
      codegen_->GenerateIntCompare(length_loc, index_loc);
    }
    codegen_->AddSlowPath(slow_path);
    // The comparison here is length vs index, so failure is length <= index
    // (unsigned), which also catches negative indices.
    __ j(kBelowEqual, slow_path->GetEntryLabel());
  }
}
5576
// Emits the card-marking write barrier: dirties the card-table entry covering
// `object`. When `value_can_be_null` is true, the marking is skipped entirely
// for a null `value`. `temp` and `card` are scratch registers; `card` ends up
// holding the card table base (whose low byte doubles as the kCardDirty
// value, see below).
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value,
                                     bool value_can_be_null) {
  NearLabel is_null;
  if (value_can_be_null) {
    __ testl(value, value);
    __ j(kEqual, &is_null);
  }
  // Load the address of the card table into `card`.
  __ gs()->movq(card, Address::Absolute(Thread::CardTableOffset<kX86_64PointerSize>().Int32Value(),
                                        /* no_rip= */ true));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the MOVB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
  __ movb(Address(temp, card, TIMES_1, 0), card);
  if (value_can_be_null) {
    __ Bind(&is_null);
  }
}
5612
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005613void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01005614 LOG(FATAL) << "Unimplemented";
5615}
5616
5617void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005618 if (instruction->GetNext()->IsSuspendCheck() &&
5619 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5620 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5621 // The back edge will generate the suspend check.
5622 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5623 }
5624
Nicolas Geoffrayecb2f9b2014-06-13 08:59:59 +00005625 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5626}
5627
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005628void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005629 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5630 instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005631 // In suspend check slow path, usually there are no caller-save registers at all.
5632 // If SIMD instructions are present, however, we force spilling all live SIMD
5633 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005634 locations->SetCustomSlowPathCallerSaves(
5635 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005636}
5637
5638void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005639 HBasicBlock* block = instruction->GetBlock();
5640 if (block->GetLoopInformation() != nullptr) {
5641 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5642 // The back edge will generate the suspend check.
5643 return;
5644 }
5645 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5646 // The goto will generate the suspend check.
5647 return;
5648 }
5649 GenerateSuspendCheck(instruction, nullptr);
5650}
5651
// Emits the suspend-check test against the thread flags (via the GS segment)
// and branches to a (possibly cached) slow path when any flag is set.
// `successor` is non-null when the check sits on a loop back edge; in that
// case the fast path jumps straight to the successor block.
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path if one was already created for this instruction
  // (the same HSuspendCheck can be visited more than once).
  SuspendCheckSlowPathX86_64* slow_path =
      down_cast<SuspendCheckSlowPathX86_64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathX86_64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the 16-bit thread-flags word in the Thread object; non-zero means a
  // suspend (or similar) request is pending.
  __ gs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>().Int32Value(),
                                  /* no_rip= */ true),
                Immediate(0));
  if (successor == nullptr) {
    // Standalone check: enter the slow path on a pending request, fall through
    // otherwise and let the slow path return here.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: fast path jumps to the loop header, slow path otherwise.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
5679
// The move resolver emits through the code generator's assembler.
X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}
5683
// Emits the move at `index`. Handles every supported source/destination pair:
// core register, FP register, constant, 32-bit and 64-bit stack slots, and
// 128-bit SIMD stack slots. The TMP core register is available as scratch for
// memory-to-memory moves.
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    // Core register source: full 64-bit move to a register or double slot,
    // 32-bit store to a single slot.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(), source.AsRegister<CpuRegister>());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsRegister<CpuRegister>());
    }
  } else if (source.IsStackSlot()) {
    // 32-bit stack slot source; slot-to-slot goes through TMP.
    if (destination.IsRegister()) {
      __ movl(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movss(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    // 64-bit stack slot source; slot-to-slot goes through TMP.
    if (destination.IsRegister()) {
      __ movq(destination.AsRegister<CpuRegister>(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else if (destination.IsFpuRegister()) {
      __ movsd(destination.AsFpuRegister<XmmRegister>(),
               Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsSIMDStackSlot()) {
    // 128-bit SIMD stack slot source: single movups to an FP register, or two
    // 64-bit copies through TMP for slot-to-slot.
    if (destination.IsFpuRegister()) {
      __ movups(destination.AsFpuRegister<XmmRegister>(),
                Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      size_t high = kX86_64WordSize;  // Offset of the high qword within the slot.
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex() + high));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex() + high), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    // Constant source: materialize by kind (int/null, long, float, double).
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant() || constant->IsNullConstant()) {
      int32_t value = CodeGenerator::GetInt32ValueOf(constant);
      if (destination.IsRegister()) {
        if (value == 0) {
          // xor is shorter than mov-immediate for zero.
          __ xorl(destination.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
        } else {
          __ movl(destination.AsRegister<CpuRegister>(), Immediate(value));
        }
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), Immediate(value));
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        codegen_->Load64BitValue(destination.AsRegister<CpuRegister>(), value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    } else if (constant->IsFloatConstant()) {
      float fp_value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load32BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsStackSlot()) << destination;
        // Store the raw bit pattern of the float.
        Immediate imm(bit_cast<int32_t, float>(fp_value));
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else {
      DCHECK(constant->IsDoubleConstant()) << constant->DebugName();
      double fp_value = constant->AsDoubleConstant()->GetValue();
      int64_t value = bit_cast<int64_t, double>(fp_value);
      if (destination.IsFpuRegister()) {
        XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
        codegen_->Load64BitValue(dest, fp_value);
      } else {
        DCHECK(destination.IsDoubleStackSlot()) << destination;
        codegen_->Store64BitValueToStack(destination, value);
      }
    }
  } else if (source.IsFpuRegister()) {
    // FP register source: register-to-register uses movaps; stores pick the
    // width matching the destination slot kind.
    if (destination.IsFpuRegister()) {
      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsStackSlot()) {
      __ movss(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else if (destination.IsDoubleStackSlot()) {
      __ movsd(Address(CpuRegister(RSP), destination.GetStackIndex()),
               source.AsFpuRegister<XmmRegister>());
    } else {
      DCHECK(destination.IsSIMDStackSlot());
      __ movups(Address(CpuRegister(RSP), destination.GetStackIndex()),
                source.AsFpuRegister<XmmRegister>());
    }
  }
}
5796
// Swaps a 32-bit value between a core register and a stack slot at RSP+mem,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}
5802
// Swaps two 64-bit core registers through TMP (avoids the implicitly locked
// xchg instruction).
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg1, CpuRegister reg2) {
  __ movq(CpuRegister(TMP), reg1);
  __ movq(reg1, reg2);
  __ movq(reg2, CpuRegister(TMP));
}
5808
// Swaps a 64-bit value between a core register and a stack slot at RSP+mem,
// using TMP as scratch.
void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
5814
// Swaps a 32-bit value between an XMM register and a stack slot at RSP+mem.
// The slot goes through TMP; the register is reloaded with movd from TMP.
void ParallelMoveResolverX86_64::Exchange32(XmmRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movss(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5820
// Swaps a 64-bit value between an XMM register and a stack slot at RSP+mem.
// The slot goes through TMP; the register is reloaded with movd from TMP.
void ParallelMoveResolverX86_64::Exchange64(XmmRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movsd(Address(CpuRegister(RSP), mem), reg);
  __ movd(reg, CpuRegister(TMP));
}
5826
Aart Bikcfe50bb2017-12-12 14:54:12 -08005827void ParallelMoveResolverX86_64::Exchange128(XmmRegister reg, int mem) {
5828 size_t extra_slot = 2 * kX86_64WordSize;
5829 __ subq(CpuRegister(RSP), Immediate(extra_slot));
5830 __ movups(Address(CpuRegister(RSP), 0), XmmRegister(reg));
5831 ExchangeMemory64(0, mem + extra_slot, 2);
5832 __ movups(XmmRegister(reg), Address(CpuRegister(RSP), 0));
5833 __ addq(CpuRegister(RSP), Immediate(extra_slot));
5834}
5835
5836void ParallelMoveResolverX86_64::ExchangeMemory32(int mem1, int mem2) {
5837 ScratchRegisterScope ensure_scratch(
5838 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5839
5840 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5841 __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
5842 __ movl(CpuRegister(ensure_scratch.GetRegister()),
5843 Address(CpuRegister(RSP), mem2 + stack_offset));
5844 __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
5845 __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
5846 CpuRegister(ensure_scratch.GetRegister()));
5847}
5848
5849void ParallelMoveResolverX86_64::ExchangeMemory64(int mem1, int mem2, int num_of_qwords) {
5850 ScratchRegisterScope ensure_scratch(
5851 this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());
5852
5853 int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
5854
5855 // Now that temp registers are available (possibly spilled), exchange blocks of memory.
5856 for (int i = 0; i < num_of_qwords; i++) {
5857 __ movq(CpuRegister(TMP),
5858 Address(CpuRegister(RSP), mem1 + stack_offset));
5859 __ movq(CpuRegister(ensure_scratch.GetRegister()),
5860 Address(CpuRegister(RSP), mem2 + stack_offset));
5861 __ movq(Address(CpuRegister(RSP), mem2 + stack_offset),
5862 CpuRegister(TMP));
5863 __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
5864 CpuRegister(ensure_scratch.GetRegister()));
5865 stack_offset += kX86_64WordSize;
5866 }
5867}
5868
// Emits the swap at `index`, dispatching on the location kinds of both
// operands to the matching Exchange*/ExchangeMemory* helper.
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.AsRegister<CpuRegister>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    ExchangeMemory32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsRegister<CpuRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsRegister<CpuRegister>(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 1);
  } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
    // XMM <-> XMM swap: park one value's low 64 bits... actually the full
    // value moves via movaps; TMP holds the 64-bit movd image. NOTE(review):
    // this path appears to assume 64-bit (non-SIMD) contents — confirm against
    // the register allocator's invariants.
    __ movd(CpuRegister(TMP), source.AsFpuRegister<XmmRegister>());
    __ movaps(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
    __ movd(destination.AsFpuRegister<XmmRegister>(), CpuRegister(TMP));
  } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsFpuRegister()) {
    Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsFpuRegister()) {
    Exchange64(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else if (source.IsSIMDStackSlot() && destination.IsSIMDStackSlot()) {
    ExchangeMemory64(destination.GetStackIndex(), source.GetStackIndex(), 2);
  } else if (source.IsFpuRegister() && destination.IsSIMDStackSlot()) {
    Exchange128(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
  } else if (destination.IsFpuRegister() && source.IsSIMDStackSlot()) {
    Exchange128(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented swap between " << source << " and " << destination;
  }
}
5910
5911
// Spills a core register so it can be used as scratch by the move resolver.
void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}
5915
5916
// Restores a core register previously spilled by SpillScratch.
void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}
5920
// Emits a fast check that the class in `class_reg` is visibly initialized,
// branching to `slow_path` otherwise. Compares only the byte of status_ that
// holds the ClassStatus bits (the status occupies the most significant bits
// above the SubtypeCheckBits), so a single unsigned cmpb/jb suffices: any
// status below kVisiblyInitialized takes the slow path.
void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck(
    SlowPathCode* slow_path, CpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_visibly_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kVisiblyInitialized) << (status_lsb_position % kBitsPerByte);

  __ cmpb(Address(class_reg, status_byte_offset), Immediate(shifted_visibly_initialized_value));
  __ j(kBelow, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5933
// Emits the comparison of a class's bitstring (stored alongside status_)
// against the check's expected path-to-root. Leaves the result in the flags:
// equality of the masked bits means the type check succeeds. `temp` must hold
// the class to check and is clobbered in the non-16-bit-mask case.
void InstructionCodeGeneratorX86_64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       CpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Compare the bitstring in memory.
    __ cmpw(Address(temp, mirror::Class::StatusOffset()), Immediate(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ movl(temp, Address(temp, mirror::Class::StatusOffset()));
    // Compare the bitstring bits using SUB.
    __ subl(temp, Immediate(path_to_root));
    // Shift out bits that do not contribute to the comparison.
    __ shll(temp, Immediate(32u - mask_bits));
  }
}
5953
// x86-64 supports every HLoadClass load kind as requested; this only DCHECKs
// that the kind is consistent with the compilation mode (JIT vs. AOT) and
// returns the desired kind unchanged.
HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      // PC-relative/linker-patched kinds are only valid when compiling AOT.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}
5976
// Allocates the location summary for HLoadClass. The runtime-call kind uses a
// fixed calling convention (RAX in and out); all other kinds produce a
// register output, may need the current method as input, and choose their
// call kind based on whether a slow path (clinit/resolution/read barrier)
// can be taken.
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Custom calling convention: RAX serves as both input and output.
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(RAX),
        Location::RegisterLocation(RAX));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Input 0 is the ArtMethod* whose declaring class is loaded.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution and/or initialization to save everything.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}
6011
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006012Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006013 dex::TypeIndex type_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006014 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006015 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006016 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006017 jit_class_patches_.emplace_back(&dex_file, type_index.index_);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006018 PatchInfo<Label>* info = &jit_class_patches_.back();
6019 return &info->label;
6020}
6021
// Emits the class load for HLoadClass according to its load kind, then a null
// check and/or class-initialization check via a shared slow path when needed.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
      GenerateGcRootFieldLoad(
          cls,
          out_loc,
          Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
          /* fixup_label= */ nullptr,
          read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      // PC-relative lea with a dummy offset, fixed up by the linker.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageTypePatch(cls);
      break;
    case HLoadClass::LoadKind::kBootImageRelRo: {
      // Load the class reference from the .data.bimg.rel.ro entry (patched).
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(cls));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load from the type's .bss entry; null until resolved, hence the
      // null check and resolution slow path below.
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
      // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      // No need for memory fence, thanks to the x86-64 memory model.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      // The class object's address is known at JIT time and fits in 32 bits.
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // Load the root from the JIT root table entry (patched after emission).
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label =
          codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
      // /* GcRoot<mirror::Class> */ out = *address
      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCode* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6114
// Allocates the location summary for an explicit class-initialization check:
// the class in a register, output (if used) aliased to it, slow-path call.
void LocationsBuilderX86_64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
6125
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006126void LocationsBuilderX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6127 // Custom calling convention: RAX serves as both input and output.
6128 Location location = Location::RegisterLocation(RAX);
6129 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
6130}
6131
// Loading a method handle is always delegated to the runtime.
void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6135
Orion Hodson18259d72018-04-12 11:18:23 +01006136void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
6137 // Custom calling convention: RAX serves as both input and output.
6138 Location location = Location::RegisterLocation(RAX);
6139 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
6140}
6141
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
  // Loading a MethodType always goes through the runtime; delegate to the shared helper.
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6145
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006146void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006147 // We assume the class to not be null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01006148 SlowPathCode* slow_path =
6149 new (codegen_->GetScopedAllocator()) LoadClassSlowPathX86_64(check->GetLoadClass(), check);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006150 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006151 GenerateClassInitializationCheck(slow_path,
6152 check->GetLocations()->InAt(0).AsRegister<CpuRegister>());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006153}
6154
HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  // x86-64 supports every string load kind; this switch only sanity-checks that
  // the requested kind is consistent with the compilation mode (AOT vs JIT).
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      // PC-relative kinds are only produced for AOT compilation.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      // Direct-address kinds are only produced when JIT-compiling.
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      // The runtime-call fallback is always valid.
      break;
  }
  return desired_string_load_kind;
}
6172
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006173void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006174 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006175 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006176 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworthabb341b2016-08-31 16:29:44 -07006177 locations->SetOut(Location::RegisterLocation(RAX));
6178 } else {
6179 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006180 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
6181 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006182 // Rely on the pResolveString to save everything.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01006183 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006184 } else {
6185 // For non-Baker read barrier we have a temp-clobbering call.
6186 }
6187 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006188 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006189}
6190
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006191Label* CodeGeneratorX86_64::NewJitRootStringPatch(const DexFile& dex_file,
Vladimir Marko174b2e22017-10-12 13:34:49 +01006192 dex::StringIndex string_index,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006193 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01006194 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006195 // Add a patch entry and return the label.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006196 jit_string_patches_.emplace_back(&dex_file, string_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006197 PatchInfo<Label>* info = &jit_string_patches_.back();
6198 return &info->label;
6199}
6200
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage() ||
             codegen_->GetCompilerOptions().IsBootImageExtension());
      // Emit a RIP-relative LEA with a dummy offset; the linker patches it to the
      // boot image string address.
      __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageStringPatch(load);
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the address from the .data.bimg.rel.ro section; patched at link time.
      __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip= */ false));
      codegen_->RecordBootImageRelRoPatch(codegen_->GetBootImageOffset(load));
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ false);
      Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
      // /* GcRoot<mirror::String> */ out = *address  /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      // No need for memory fence, thanks to the x86-64 memory model.
      // If the .bss slot is still null, resolve the string on the slow path.
      SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadStringSlowPathX86_64(load);
      codegen_->AddSlowPath(slow_path);
      __ testl(out, out);
      __ j(kEqual, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      // JIT knows the boot image string address; it fits in 32 bits (low 4GiB).
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ movl(out, Immediate(static_cast<int32_t>(address)));  // Zero-extended.
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
                                          /* no_rip= */ true);
      Label* fixup_label = codegen_->NewJitRootStringPatch(
          load->GetDexFile(), load->GetStringIndex(), load->GetString());
      // /* GcRoot<mirror::String> */ out = *address
      GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Fallback: resolve through the runtime.
  // Custom calling convention: RAX serves as both input and output.
  __ movl(CpuRegister(RAX), Immediate(load->GetStringIndex().index_));
  codegen_->InvokeRuntime(kQuickResolveString,
                          load,
                          load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6263
David Brazdilcb1c0552015-08-04 16:22:25 +01006264static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006265 return Address::Absolute(Thread::ExceptionOffset<kX86_64PointerSize>().Int32Value(),
Andreas Gampe3db70682018-12-26 15:12:03 -08006266 /* no_rip= */ true);
David Brazdilcb1c0552015-08-04 16:22:25 +01006267}
6268
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006269void LocationsBuilderX86_64::VisitLoadException(HLoadException* load) {
6270 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006271 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006272 locations->SetOut(Location::RequiresRegister());
6273}
6274
6275void InstructionCodeGeneratorX86_64::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006276 __ gs()->movl(load->GetLocations()->Out().AsRegister<CpuRegister>(), GetExceptionTlsAddress());
6277}
6278
void LocationsBuilderX86_64::VisitClearException(HClearException* clear) {
  // No inputs, no outputs, no call: just register an empty location summary.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6282
void InstructionCodeGeneratorX86_64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store null into the Thread object's pending-exception field (GS-relative).
  __ gs()->movl(GetExceptionTlsAddress(), Immediate(0));
}
6286
6287void LocationsBuilderX86_64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006288 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6289 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006290 InvokeRuntimeCallingConvention calling_convention;
6291 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6292}
6293
void InstructionCodeGeneratorX86_64::VisitThrow(HThrow* instruction) {
  // Hand the exception object (already in the calling-convention register) to the runtime.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6298
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006299// Temp is used for read barrier.
6300static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6301 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006302 !kUseBakerReadBarrier &&
6303 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006304 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006305 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6306 return 1;
6307 }
6308 return 0;
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006309}
6310
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006311// Interface case has 2 temps, one for holding the number of interfaces, one for the current
6312// interface pointer, the current interface is compared in memory.
6313// The other checks have one temp for loading the object's class.
6314static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6315 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6316 return 2;
6317 }
6318 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Nicolas Geoffray53b07bd2016-11-05 15:09:19 +00006319}
6320
void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      // These checks are emitted inline; a slow path is only needed when a read
      // barrier must be emitted for the class loads.
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These always fall back to a slow path (see the code generator's switch).
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      // Pure inline comparison; no call of any kind.
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // The bitstring path/mask inputs are compile-time constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Note that TypeCheckSlowPathX86_64 uses this "out" register too.
  locations->SetOut(Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
6361
// Emits the instanceof check: sets `out` to 1 if `obj` is an instance of the
// class in input 1 (per the check kind), 0 otherwise. A null `obj` yields 0.
void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  CpuRegister out = out_loc.AsRegister<CpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1u) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  SlowPathCode* slow_path = nullptr;
  NearLabel done, zero;

  // Return 0 if `obj` is null.
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ testl(obj, obj);
    __ j(kEqual, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      if (zero.IsLinked()) {
        // Classes must be equal for the instanceof to succeed.
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        // No null check was emitted, so we can materialize the flags directly.
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      NearLabel loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kNotEqual, &loop);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      NearLabel loop, success;
      __ Bind(&loop);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      __ j(kNotEqual, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ jmp(&done);
      __ Bind(&success);
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        read_barrier_option);
      // Do an exact check.
      NearLabel exact_check;
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      __ j(kEqual, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ testl(out, out);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ j(kEqual, &done);
      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, &zero);
      __ Bind(&exact_check);
      __ movl(out, Immediate(1));
      __ jmp(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);
      if (cls.IsRegister()) {
        __ cmpl(out, cls.AsRegister<CpuRegister>());
      } else {
        DCHECK(cls.IsStackSlot()) << cls;
        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
      }
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ j(kNotEqual, slow_path->GetEntryLabel());
      __ movl(out, Immediate(1));
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
          instruction, /* is_fatal= */ false);
      codegen_->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ jmp(&done);
      }
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out);
      if (zero.IsLinked()) {
        __ j(kNotEqual, &zero);
        __ movl(out, Immediate(1));
        __ jmp(&done);
      } else {
        __ setcc(kEqual, out);
        // setcc only sets the low byte.
        __ andl(out, Immediate(1));
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ xorl(out, out);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
6617
void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
    // Require a register for the interface check since there is a loop that compares the class to
    // a memory address.
    locations->SetInAt(1, Location::RequiresRegister());
  } else if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    // The bitstring path/mask inputs are compile-time constants.
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::Any());
  }
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathX86_64.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}
6638
6639void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00006640 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006641 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006642 Location obj_loc = locations->InAt(0);
6643 CpuRegister obj = obj_loc.AsRegister<CpuRegister>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006644 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006645 Location temp_loc = locations->GetTemp(0);
6646 CpuRegister temp = temp_loc.AsRegister<CpuRegister>();
Vladimir Marko9f8d3122018-04-06 13:47:59 +01006647 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6648 DCHECK_GE(num_temps, 1u);
6649 DCHECK_LE(num_temps, 2u);
6650 Location maybe_temp2_loc = (num_temps >= 2u) ? locations->GetTemp(1) : Location::NoLocation();
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006651 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6652 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6653 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6654 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6655 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6656 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006657 const uint32_t object_array_data_offset =
6658 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006659
Vladimir Marko87584542017-12-12 17:47:52 +00006660 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006661 SlowPathCode* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006662 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathX86_64(
6663 instruction, is_type_check_slow_path_fatal);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006664 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006665
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006666
6667 NearLabel done;
6668 // Avoid null check if we know obj is not null.
6669 if (instruction->MustDoNullCheck()) {
6670 __ testl(obj, obj);
6671 __ j(kEqual, &done);
6672 }
6673
Roland Levillain0d5a2812015-11-13 10:07:31 +00006674 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006675 case TypeCheckKind::kExactCheck:
6676 case TypeCheckKind::kArrayCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006677 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006678 GenerateReferenceLoadTwoRegisters(instruction,
6679 temp_loc,
6680 obj_loc,
6681 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006682 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006683 if (cls.IsRegister()) {
6684 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6685 } else {
6686 DCHECK(cls.IsStackSlot()) << cls;
6687 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6688 }
6689 // Jump to slow path for throwing the exception or doing a
6690 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006691 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006692 break;
6693 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006694
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006695 case TypeCheckKind::kAbstractClassCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006696 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006697 GenerateReferenceLoadTwoRegisters(instruction,
6698 temp_loc,
6699 obj_loc,
6700 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006701 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006702 // If the class is abstract, we eagerly fetch the super class of the
6703 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006704 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006705 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006706 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006707 GenerateReferenceLoadOneRegister(instruction,
6708 temp_loc,
6709 super_offset,
6710 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006711 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006712
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006713 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6714 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006715 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006716 // Otherwise, compare the classes.
6717 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006718 if (cls.IsRegister()) {
6719 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6720 } else {
6721 DCHECK(cls.IsStackSlot()) << cls;
6722 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6723 }
6724 __ j(kNotEqual, &loop);
6725 break;
6726 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006727
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006728 case TypeCheckKind::kClassHierarchyCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006729 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006730 GenerateReferenceLoadTwoRegisters(instruction,
6731 temp_loc,
6732 obj_loc,
6733 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006734 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006735 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006736 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006737 __ Bind(&loop);
6738 if (cls.IsRegister()) {
6739 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6740 } else {
6741 DCHECK(cls.IsStackSlot()) << cls;
6742 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6743 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006744 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006745
Roland Levillain0d5a2812015-11-13 10:07:31 +00006746 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006747 GenerateReferenceLoadOneRegister(instruction,
6748 temp_loc,
6749 super_offset,
6750 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006751 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006752
6753 // If the class reference currently in `temp` is not null, jump
6754 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006755 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006756 __ j(kNotZero, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006757 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006758 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006759 break;
6760 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006761
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006762 case TypeCheckKind::kArrayObjectCheck: {
Roland Levillain86503782016-02-11 19:07:30 +00006763 // /* HeapReference<Class> */ temp = obj->klass_
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006764 GenerateReferenceLoadTwoRegisters(instruction,
6765 temp_loc,
6766 obj_loc,
6767 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006768 kWithoutReadBarrier);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006769 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006770 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006771 if (cls.IsRegister()) {
6772 __ cmpl(temp, cls.AsRegister<CpuRegister>());
6773 } else {
6774 DCHECK(cls.IsStackSlot()) << cls;
6775 __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
6776 }
6777 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006778
6779 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006780 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006781 GenerateReferenceLoadOneRegister(instruction,
6782 temp_loc,
6783 component_offset,
6784 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006785 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006786
6787 // If the component type is not null (i.e. the object is indeed
6788 // an array), jump to label `check_non_primitive_component_type`
6789 // to further check that this component type is not a primitive
6790 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006791 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006792 // Otherwise, jump to the slow path to throw the exception.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006793 __ j(kZero, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006794 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006795 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006796 break;
6797 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006798
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006799 case TypeCheckKind::kUnresolvedCheck: {
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006800 // We always go into the type check slow path for the unresolved case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006801 //
6802 // We cannot directly call the CheckCast runtime entry point
6803 // without resorting to a type checking slow path here (i.e. by
6804 // calling InvokeRuntime directly), as it would require to
6805 // assign fixed registers for the inputs of this HInstanceOf
6806 // instruction (following the runtime calling convention), which
6807 // might be cluttered by the potential first read barrier
6808 // emission at the beginning of this method.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006809 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006810 break;
6811 }
6812
Vladimir Marko175e7862018-03-27 09:03:13 +00006813 case TypeCheckKind::kInterfaceCheck: {
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006814 // Fast path for the interface check. Try to avoid read barriers to improve the fast path.
6815 // We can not get false positives by doing this.
6816 // /* HeapReference<Class> */ temp = obj->klass_
6817 GenerateReferenceLoadTwoRegisters(instruction,
6818 temp_loc,
6819 obj_loc,
6820 class_offset,
6821 kWithoutReadBarrier);
Mathieu Chartiercdba73b2016-11-03 19:23:06 -07006822
Vladimir Markoe619f6c2017-12-12 16:00:01 +00006823 // /* HeapReference<Class> */ temp = temp->iftable_
6824 GenerateReferenceLoadTwoRegisters(instruction,
6825 temp_loc,
6826 temp_loc,
6827 iftable_offset,
6828 kWithoutReadBarrier);
6829 // Iftable is never null.
6830 __ movl(maybe_temp2_loc.AsRegister<CpuRegister>(), Address(temp, array_length_offset));
6831 // Maybe poison the `cls` for direct comparison with memory.
6832 __ MaybePoisonHeapReference(cls.AsRegister<CpuRegister>());
6833 // Loop through the iftable and check if any class matches.
6834 NearLabel start_loop;
6835 __ Bind(&start_loop);
6836 // Need to subtract first to handle the empty array case.
6837 __ subl(maybe_temp2_loc.AsRegister<CpuRegister>(), Immediate(2));
6838 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6839 // Go to next interface if the classes do not match.
6840 __ cmpl(cls.AsRegister<CpuRegister>(),
6841 CodeGeneratorX86_64::ArrayAddress(temp,
6842 maybe_temp2_loc,
6843 TIMES_4,
6844 object_array_data_offset));
6845 __ j(kNotEqual, &start_loop); // Return if same class.
6846 // If `cls` was poisoned above, unpoison it.
6847 __ MaybeUnpoisonHeapReference(cls.AsRegister<CpuRegister>());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006848 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00006849 }
6850
6851 case TypeCheckKind::kBitstringCheck: {
6852 // /* HeapReference<Class> */ temp = obj->klass_
6853 GenerateReferenceLoadTwoRegisters(instruction,
6854 temp_loc,
6855 obj_loc,
6856 class_offset,
6857 kWithoutReadBarrier);
6858
6859 GenerateBitstringTypeCheckCompare(instruction, temp);
6860 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
6861 break;
6862 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006863 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006864
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006865 if (done.IsLinked()) {
6866 __ Bind(&done);
6867 }
6868
Roland Levillain0d5a2812015-11-13 10:07:31 +00006869 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006870}
6871
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006872void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006873 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6874 instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006875 InvokeRuntimeCallingConvention calling_convention;
6876 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6877}
6878
6879void InstructionCodeGeneratorX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006880 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006881 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006882 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006883 if (instruction->IsEnter()) {
6884 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6885 } else {
6886 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6887 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006888}
6889
Shalini Salomi Bodapatidd121f62018-10-26 15:03:53 +05306890void LocationsBuilderX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6891 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6892 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6893 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6894 locations->SetInAt(0, Location::RequiresRegister());
6895 // There is no immediate variant of negated bitwise and in X86.
6896 locations->SetInAt(1, Location::RequiresRegister());
6897 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6898}
6899
6900void LocationsBuilderX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6901 DCHECK(codegen_->GetInstructionSetFeatures().HasAVX2());
6902 DCHECK(DataType::IsIntOrLongType(instruction->GetType())) << instruction->GetType();
6903 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
6904 locations->SetInAt(0, Location::RequiresRegister());
6905 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6906}
6907
6908void InstructionCodeGeneratorX86_64::VisitX86AndNot(HX86AndNot* instruction) {
6909 LocationSummary* locations = instruction->GetLocations();
6910 Location first = locations->InAt(0);
6911 Location second = locations->InAt(1);
6912 Location dest = locations->Out();
6913 __ andn(dest.AsRegister<CpuRegister>(), first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
6914}
6915
6916void InstructionCodeGeneratorX86_64::VisitX86MaskOrResetLeastSetBit(HX86MaskOrResetLeastSetBit* instruction) {
6917 LocationSummary* locations = instruction->GetLocations();
6918 Location src = locations->InAt(0);
6919 Location dest = locations->Out();
6920 switch (instruction->GetOpKind()) {
6921 case HInstruction::kAnd:
6922 __ blsr(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6923 break;
6924 case HInstruction::kXor:
6925 __ blsmsk(dest.AsRegister<CpuRegister>(), src.AsRegister<CpuRegister>());
6926 break;
6927 default:
6928 LOG(FATAL) << "Unreachable";
6929 }
6930}
6931
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006932void LocationsBuilderX86_64::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6933void LocationsBuilderX86_64::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6934void LocationsBuilderX86_64::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6935
6936void LocationsBuilderX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
6937 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006938 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006939 DCHECK(instruction->GetResultType() == DataType::Type::kInt32
6940 || instruction->GetResultType() == DataType::Type::kInt64);
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006941 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell40741f32015-04-20 22:10:34 -04006942 locations->SetInAt(1, Location::Any());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006943 locations->SetOut(Location::SameAsFirstInput());
6944}
6945
// Delegates to the shared bitwise-operation emitter.
void InstructionCodeGeneratorX86_64::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}
6949
// Delegates to the shared bitwise-operation emitter.
void InstructionCodeGeneratorX86_64::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
6953
// Delegates to the shared bitwise-operation emitter.
void InstructionCodeGeneratorX86_64::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
6957
// Emits the machine code for an integer HAnd/HOr/HXor. The output location
// is the same as the first input (see the matching LocationsBuilder method),
// so the two-operand x86-64 instruction forms can be used directly.
void InstructionCodeGeneratorX86_64::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // The register allocator assigned the output to the first input.
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == DataType::Type::kInt32) {
    // 32-bit case: the second operand may be a register, an immediate
    // constant, or a stack slot; use the matching addressing form.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), second.AsRegister<CpuRegister>());
      }
    } else if (second.IsConstant()) {
      Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), imm);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), imm);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), imm);
      }
    } else {
      // Second operand lives on the stack.
      Address address(CpuRegister(RSP), second.GetStackIndex());
      if (instruction->IsAnd()) {
        __ andl(first.AsRegister<CpuRegister>(), address);
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegister<CpuRegister>(), address);
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegister<CpuRegister>(), address);
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), DataType::Type::kInt64);
    CpuRegister first_reg = first.AsRegister<CpuRegister>();
    bool second_is_constant = false;
    int64_t value = 0;
    if (second.IsConstant()) {
      second_is_constant = true;
      value = second.GetConstant()->AsLongConstant()->GetValue();
    }
    // 64-bit ops only take immediates that fit in 32 bits; wider constants
    // are read from an in-memory 64-bit literal instead.
    bool is_int32_value = IsInt<32>(value);

    if (instruction->IsAnd()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ andq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ andq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ andq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ andq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else if (instruction->IsOr()) {
      if (second_is_constant) {
        if (is_int32_value) {
          __ orq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ orq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ orq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ orq(first_reg, second.AsRegister<CpuRegister>());
      }
    } else {
      DCHECK(instruction->IsXor());
      if (second_is_constant) {
        if (is_int32_value) {
          __ xorq(first_reg, Immediate(static_cast<int32_t>(value)));
        } else {
          __ xorq(first_reg, codegen_->LiteralInt64Address(value));
        }
      } else if (second.IsDoubleStackSlot()) {
        __ xorq(first_reg, Address(CpuRegister(RSP), second.GetStackIndex()));
      } else {
        __ xorq(first_reg, second.AsRegister<CpuRegister>());
      }
    }
  }
}
7046
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007047void InstructionCodeGeneratorX86_64::GenerateReferenceLoadOneRegister(
7048 HInstruction* instruction,
7049 Location out,
7050 uint32_t offset,
7051 Location maybe_temp,
7052 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007053 CpuRegister out_reg = out.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007054 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007055 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007056 if (kUseBakerReadBarrier) {
7057 // Load with fast path based Baker's read barrier.
7058 // /* HeapReference<Object> */ out = *(out + offset)
7059 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007060 instruction, out, out_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007061 } else {
7062 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007063 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007064 // in the following move operation, as we will need it for the
7065 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007066 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007067 __ movl(maybe_temp.AsRegister<CpuRegister>(), out_reg);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007068 // /* HeapReference<Object> */ out = *(out + offset)
7069 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007070 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007071 }
7072 } else {
7073 // Plain load with no read barrier.
7074 // /* HeapReference<Object> */ out = *(out + offset)
7075 __ movl(out_reg, Address(out_reg, offset));
7076 __ MaybeUnpoisonHeapReference(out_reg);
7077 }
7078}
7079
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007080void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(
7081 HInstruction* instruction,
7082 Location out,
7083 Location obj,
7084 uint32_t offset,
7085 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007086 CpuRegister out_reg = out.AsRegister<CpuRegister>();
7087 CpuRegister obj_reg = obj.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007088 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08007089 CHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007090 if (kUseBakerReadBarrier) {
7091 // Load with fast path based Baker's read barrier.
7092 // /* HeapReference<Object> */ out = *(obj + offset)
7093 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Andreas Gampe3db70682018-12-26 15:12:03 -08007094 instruction, out, obj_reg, offset, /* needs_null_check= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007095 } else {
7096 // Load with slow path based read barrier.
7097 // /* HeapReference<Object> */ out = *(obj + offset)
7098 __ movl(out_reg, Address(obj_reg, offset));
7099 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7100 }
7101 } else {
7102 // Plain load with no read barrier.
7103 // /* HeapReference<Object> */ out = *(obj + offset)
7104 __ movl(out_reg, Address(obj_reg, offset));
7105 __ MaybeUnpoisonHeapReference(out_reg);
7106 }
7107}
7108
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007109void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(
7110 HInstruction* instruction,
7111 Location root,
7112 const Address& address,
7113 Label* fixup_label,
7114 ReadBarrierOption read_barrier_option) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007115 CpuRegister root_reg = root.AsRegister<CpuRegister>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007116 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007117 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007118 if (kUseBakerReadBarrier) {
7119 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
7120 // Baker's read barrier are used:
7121 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007122 // root = obj.field;
7123 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7124 // if (temp != null) {
7125 // root = temp(root)
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007126 // }
7127
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007128 // /* GcRoot<mirror::Object> */ root = *address
7129 __ movl(root_reg, address);
7130 if (fixup_label != nullptr) {
7131 __ Bind(fixup_label);
7132 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007133 static_assert(
7134 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7135 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7136 "have different sizes.");
7137 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7138 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7139 "have different sizes.");
7140
Vladimir Marko953437b2016-08-24 08:30:46 +00007141 // Slow path marking the GC root `root`.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007142 SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007143 instruction, root, /* unpoison_ref_before_marking= */ false);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007144 codegen_->AddSlowPath(slow_path);
7145
Roland Levillaind966ce72017-02-09 16:20:14 +00007146 // Test the `Thread::Current()->pReadBarrierMarkReg ## root.reg()` entrypoint.
7147 const int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +01007148 Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(root.reg());
Andreas Gampe3db70682018-12-26 15:12:03 -08007149 __ gs()->cmpl(Address::Absolute(entry_point_offset, /* no_rip= */ true), Immediate(0));
Roland Levillaind966ce72017-02-09 16:20:14 +00007150 // The entrypoint is null when the GC is not marking.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007151 __ j(kNotEqual, slow_path->GetEntryLabel());
7152 __ Bind(slow_path->GetExitLabel());
7153 } else {
7154 // GC root loaded through a slow path for read barriers other
7155 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007156 // /* GcRoot<mirror::Object>* */ root = address
7157 __ leaq(root_reg, address);
7158 if (fixup_label != nullptr) {
7159 __ Bind(fixup_label);
7160 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007161 // /* mirror::Object* */ root = root->Read()
7162 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7163 }
7164 } else {
7165 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007166 // /* GcRoot<mirror::Object> */ root = *address
7167 __ movl(root_reg, address);
7168 if (fixup_label != nullptr) {
7169 __ Bind(fixup_label);
7170 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007171 // Note that GC roots are not affected by heap poisoning, thus we
7172 // do not have to unpoison `root_reg` here.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007173 }
7174}
7175
7176void CodeGeneratorX86_64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7177 Location ref,
7178 CpuRegister obj,
7179 uint32_t offset,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007180 bool needs_null_check) {
7181 DCHECK(kEmitCompilerReadBarrier);
7182 DCHECK(kUseBakerReadBarrier);
7183
7184 // /* HeapReference<Object> */ ref = *(obj + offset)
7185 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007186 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007187}
7188
7189void CodeGeneratorX86_64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7190 Location ref,
7191 CpuRegister obj,
7192 uint32_t data_offset,
7193 Location index,
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007194 bool needs_null_check) {
7195 DCHECK(kEmitCompilerReadBarrier);
7196 DCHECK(kUseBakerReadBarrier);
7197
Roland Levillain3d312422016-06-23 13:53:42 +01007198 static_assert(
7199 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7200 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007201 // /* HeapReference<Object> */ ref =
7202 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007203 Address src = CodeGeneratorX86_64::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007204 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007205}
7206
7207void CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7208 Location ref,
7209 CpuRegister obj,
7210 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007211 bool needs_null_check,
7212 bool always_update_field,
7213 CpuRegister* temp1,
7214 CpuRegister* temp2) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007215 DCHECK(kEmitCompilerReadBarrier);
7216 DCHECK(kUseBakerReadBarrier);
7217
7218 // In slow path based read barriers, the read barrier call is
7219 // inserted after the original load. However, in fast path based
7220 // Baker's read barriers, we need to perform the load of
7221 // mirror::Object::monitor_ *before* the original reference load.
7222 // This load-load ordering is required by the read barrier.
7223 // The fast path/slow path (for Baker's algorithm) should look like:
7224 //
7225 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7226 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7227 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007228 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007229 // if (is_gray) {
7230 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7231 // }
7232 //
7233 // Note: the original implementation in ReadBarrier::Barrier is
7234 // slightly more complex as:
7235 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007236 // the high-bits of rb_state, which are expected to be all zeroes
7237 // (we use CodeGeneratorX86_64::GenerateMemoryBarrier instead
7238 // here, which is a no-op thanks to the x86-64 memory model);
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007239 // - it performs additional checks that we do not do here for
7240 // performance reasons.
7241
7242 CpuRegister ref_reg = ref.AsRegister<CpuRegister>();
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007243 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7244
Vladimir Marko953437b2016-08-24 08:30:46 +00007245 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01007246 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007247 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007248 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7249 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7250 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7251
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007252 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007253 // ref = ReadBarrier::Mark(ref);
7254 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7255 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007256 if (needs_null_check) {
7257 MaybeRecordImplicitNullCheck(instruction);
7258 }
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007259
7260 // Load fence to prevent load-load reordering.
7261 // Note that this is a no-op, thanks to the x86-64 memory model.
7262 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7263
7264 // The actual reference load.
7265 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007266 __ movl(ref_reg, src); // Flags are unaffected.
7267
7268 // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
7269 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007270 SlowPathCode* slow_path;
7271 if (always_update_field) {
7272 DCHECK(temp1 != nullptr);
7273 DCHECK(temp2 != nullptr);
Vladimir Marko174b2e22017-10-12 13:34:49 +01007274 slow_path = new (GetScopedAllocator()) ReadBarrierMarkAndUpdateFieldSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007275 instruction, ref, obj, src, /* unpoison_ref_before_marking= */ true, *temp1, *temp2);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007276 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01007277 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathX86_64(
Andreas Gampe3db70682018-12-26 15:12:03 -08007278 instruction, ref, /* unpoison_ref_before_marking= */ true);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007279 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007280 AddSlowPath(slow_path);
7281
7282 // We have done the "if" of the gray bit check above, now branch based on the flags.
7283 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007284
7285 // Object* ref = ref_addr->AsMirrorPtr()
7286 __ MaybeUnpoisonHeapReference(ref_reg);
7287
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007288 __ Bind(slow_path->GetExitLabel());
7289}
7290
7291void CodeGeneratorX86_64::GenerateReadBarrierSlow(HInstruction* instruction,
7292 Location out,
7293 Location ref,
7294 Location obj,
7295 uint32_t offset,
7296 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007297 DCHECK(kEmitCompilerReadBarrier);
7298
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007299 // Insert a slow path based read barrier *after* the reference load.
7300 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007301 // If heap poisoning is enabled, the unpoisoning of the loaded
7302 // reference will be carried out by the runtime within the slow
7303 // path.
7304 //
7305 // Note that `ref` currently does not get unpoisoned (when heap
7306 // poisoning is enabled), which is alright as the `ref` argument is
7307 // not used by the artReadBarrierSlow entry point.
7308 //
7309 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01007310 SlowPathCode* slow_path = new (GetScopedAllocator())
Roland Levillain0d5a2812015-11-13 10:07:31 +00007311 ReadBarrierForHeapReferenceSlowPathX86_64(instruction, out, ref, obj, offset, index);
7312 AddSlowPath(slow_path);
7313
Roland Levillain0d5a2812015-11-13 10:07:31 +00007314 __ jmp(slow_path->GetEntryLabel());
7315 __ Bind(slow_path->GetExitLabel());
7316}
7317
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007318void CodeGeneratorX86_64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7319 Location out,
7320 Location ref,
7321 Location obj,
7322 uint32_t offset,
7323 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007324 if (kEmitCompilerReadBarrier) {
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007325 // Baker's read barriers shall be handled by the fast path
7326 // (CodeGeneratorX86_64::GenerateReferenceLoadWithBakerReadBarrier).
7327 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007328 // If heap poisoning is enabled, unpoisoning will be taken care of
7329 // by the runtime within the slow path.
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007330 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007331 } else if (kPoisonHeapReferences) {
7332 __ UnpoisonHeapReference(out.AsRegister<CpuRegister>());
7333 }
7334}
7335
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007336void CodeGeneratorX86_64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7337 Location out,
7338 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007339 DCHECK(kEmitCompilerReadBarrier);
7340
Roland Levillain1e7f8db2015-12-15 10:54:19 +00007341 // Insert a slow path based read barrier *after* the GC root load.
7342 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007343 // Note that GC roots are not affected by heap poisoning, so we do
7344 // not need to do anything special for this here.
7345 SlowPathCode* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01007346 new (GetScopedAllocator()) ReadBarrierForRootSlowPathX86_64(instruction, out, root);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007347 AddSlowPath(slow_path);
7348
Roland Levillain0d5a2812015-11-13 10:07:31 +00007349 __ jmp(slow_path->GetEntryLabel());
7350 __ Bind(slow_path->GetExitLabel());
7351}
7352
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007353void LocationsBuilderX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007354 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007355 LOG(FATAL) << "Unreachable";
7356}
7357
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007358void InstructionCodeGeneratorX86_64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007359 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007360 LOG(FATAL) << "Unreachable";
7361}
7362
Mark Mendellfe57faa2015-09-18 09:26:15 -04007363// Simple implementation of packed switch - generate cascaded compare/jumps.
7364void LocationsBuilderX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7365 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007366 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007367 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell9c86b482015-09-18 13:36:07 -04007368 locations->AddTemp(Location::RequiresRegister());
7369 locations->AddTemp(Location::RequiresRegister());
Mark Mendellfe57faa2015-09-18 09:26:15 -04007370}
7371
7372void InstructionCodeGeneratorX86_64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7373 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007374 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04007375 LocationSummary* locations = switch_instr->GetLocations();
Mark Mendell9c86b482015-09-18 13:36:07 -04007376 CpuRegister value_reg_in = locations->InAt(0).AsRegister<CpuRegister>();
7377 CpuRegister temp_reg = locations->GetTemp(0).AsRegister<CpuRegister>();
7378 CpuRegister base_reg = locations->GetTemp(1).AsRegister<CpuRegister>();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007379 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7380
7381 // Should we generate smaller inline compare/jumps?
7382 if (num_entries <= kPackedSwitchJumpTableThreshold) {
7383 // Figure out the correct compare values and jump conditions.
7384 // Handle the first compare/branch as a special case because it might
7385 // jump to the default case.
7386 DCHECK_GT(num_entries, 2u);
7387 Condition first_condition;
7388 uint32_t index;
7389 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
7390 if (lower_bound != 0) {
7391 first_condition = kLess;
7392 __ cmpl(value_reg_in, Immediate(lower_bound));
7393 __ j(first_condition, codegen_->GetLabelOf(default_block));
7394 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
7395
7396 index = 1;
7397 } else {
7398 // Handle all the compare/jumps below.
7399 first_condition = kBelow;
7400 index = 0;
7401 }
7402
7403 // Handle the rest of the compare/jumps.
7404 for (; index + 1 < num_entries; index += 2) {
7405 int32_t compare_to_value = lower_bound + index + 1;
7406 __ cmpl(value_reg_in, Immediate(compare_to_value));
7407 // Jump to successors[index] if value < case_value[index].
7408 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
7409 // Jump to successors[index + 1] if value == case_value[index + 1].
7410 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
7411 }
7412
7413 if (index != num_entries) {
7414 // There are an odd number of entries. Handle the last one.
7415 DCHECK_EQ(index + 1, num_entries);
Nicolas Geoffray6ce01732015-12-30 14:10:13 +00007416 __ cmpl(value_reg_in, Immediate(static_cast<int32_t>(lower_bound + index)));
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007417 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
7418 }
7419
7420 // And the default for any other value.
7421 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
7422 __ jmp(codegen_->GetLabelOf(default_block));
7423 }
7424 return;
7425 }
Mark Mendell9c86b482015-09-18 13:36:07 -04007426
7427 // Remove the bias, if needed.
7428 Register value_reg_out = value_reg_in.AsRegister();
7429 if (lower_bound != 0) {
7430 __ leal(temp_reg, Address(value_reg_in, -lower_bound));
7431 value_reg_out = temp_reg.AsRegister();
7432 }
7433 CpuRegister value_reg(value_reg_out);
7434
7435 // Is the value in range?
Mark Mendell9c86b482015-09-18 13:36:07 -04007436 __ cmpl(value_reg, Immediate(num_entries - 1));
7437 __ j(kAbove, codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007438
Mark Mendell9c86b482015-09-18 13:36:07 -04007439 // We are in the range of the table.
7440 // Load the address of the jump table in the constant area.
7441 __ leaq(base_reg, codegen_->LiteralCaseTable(switch_instr));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007442
Mark Mendell9c86b482015-09-18 13:36:07 -04007443 // Load the (signed) offset from the jump table.
7444 __ movsxd(temp_reg, Address(base_reg, value_reg, TIMES_4, 0));
7445
7446 // Add the offset to the address of the table base.
7447 __ addq(temp_reg, base_reg);
7448
7449 // And jump.
7450 __ jmp(temp_reg);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007451}
7452
xueliang.zhonge0eb4832017-10-30 13:43:14 +00007453void LocationsBuilderX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7454 ATTRIBUTE_UNUSED) {
7455 LOG(FATAL) << "Unreachable";
7456}
7457
7458void InstructionCodeGeneratorX86_64::VisitIntermediateAddress(HIntermediateAddress* instruction
7459 ATTRIBUTE_UNUSED) {
7460 LOG(FATAL) << "Unreachable";
7461}
7462
Aart Bikc5d47542016-01-27 17:00:35 -08007463void CodeGeneratorX86_64::Load32BitValue(CpuRegister dest, int32_t value) {
7464 if (value == 0) {
7465 __ xorl(dest, dest);
7466 } else {
7467 __ movl(dest, Immediate(value));
7468 }
7469}
7470
Mark Mendell92e83bf2015-05-07 11:25:03 -04007471void CodeGeneratorX86_64::Load64BitValue(CpuRegister dest, int64_t value) {
7472 if (value == 0) {
Aart Bikc5d47542016-01-27 17:00:35 -08007473 // Clears upper bits too.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007474 __ xorl(dest, dest);
Vladimir Markoed009782016-02-22 16:54:39 +00007475 } else if (IsUint<32>(value)) {
7476 // We can use a 32 bit move, as it will zero-extend and is shorter.
Mark Mendell92e83bf2015-05-07 11:25:03 -04007477 __ movl(dest, Immediate(static_cast<int32_t>(value)));
7478 } else {
7479 __ movq(dest, Immediate(value));
7480 }
7481}
7482
Mark Mendell7c0b44f2016-02-01 10:08:35 -05007483void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, int32_t value) {
7484 if (value == 0) {
7485 __ xorps(dest, dest);
7486 } else {
7487 __ movss(dest, LiteralInt32Address(value));
7488 }
7489}
7490
7491void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, int64_t value) {
7492 if (value == 0) {
7493 __ xorpd(dest, dest);
7494 } else {
7495 __ movsd(dest, LiteralInt64Address(value));
7496 }
7497}
7498
7499void CodeGeneratorX86_64::Load32BitValue(XmmRegister dest, float value) {
7500 Load32BitValue(dest, bit_cast<int32_t, float>(value));
7501}
7502
7503void CodeGeneratorX86_64::Load64BitValue(XmmRegister dest, double value) {
7504 Load64BitValue(dest, bit_cast<int64_t, double>(value));
7505}
7506
Aart Bika19616e2016-02-01 18:57:58 -08007507void CodeGeneratorX86_64::Compare32BitValue(CpuRegister dest, int32_t value) {
7508 if (value == 0) {
7509 __ testl(dest, dest);
7510 } else {
7511 __ cmpl(dest, Immediate(value));
7512 }
7513}
7514
7515void CodeGeneratorX86_64::Compare64BitValue(CpuRegister dest, int64_t value) {
7516 if (IsInt<32>(value)) {
7517 if (value == 0) {
7518 __ testq(dest, dest);
7519 } else {
7520 __ cmpq(dest, Immediate(static_cast<int32_t>(value)));
7521 }
7522 } else {
7523 // Value won't fit in an int.
7524 __ cmpq(dest, LiteralInt64Address(value));
7525 }
7526}
7527
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007528void CodeGeneratorX86_64::GenerateIntCompare(Location lhs, Location rhs) {
7529 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
jessicahandojo4877b792016-09-08 19:49:13 -07007530 GenerateIntCompare(lhs_reg, rhs);
7531}
7532
7533void CodeGeneratorX86_64::GenerateIntCompare(CpuRegister lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007534 if (rhs.IsConstant()) {
7535 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007536 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007537 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007538 __ cmpl(lhs, Address(CpuRegister(RSP), rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007539 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007540 __ cmpl(lhs, rhs.AsRegister<CpuRegister>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007541 }
7542}
7543
7544void CodeGeneratorX86_64::GenerateLongCompare(Location lhs, Location rhs) {
7545 CpuRegister lhs_reg = lhs.AsRegister<CpuRegister>();
7546 if (rhs.IsConstant()) {
7547 int64_t value = rhs.GetConstant()->AsLongConstant()->GetValue();
7548 Compare64BitValue(lhs_reg, value);
7549 } else if (rhs.IsDoubleStackSlot()) {
7550 __ cmpq(lhs_reg, Address(CpuRegister(RSP), rhs.GetStackIndex()));
7551 } else {
7552 __ cmpq(lhs_reg, rhs.AsRegister<CpuRegister>());
7553 }
7554}
7555
7556Address CodeGeneratorX86_64::ArrayAddress(CpuRegister obj,
7557 Location index,
7558 ScaleFactor scale,
7559 uint32_t data_offset) {
7560 return index.IsConstant() ?
7561 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7562 Address(obj, index.AsRegister<CpuRegister>(), scale, data_offset);
7563}
7564
Mark Mendellcfa410b2015-05-25 16:02:44 -04007565void CodeGeneratorX86_64::Store64BitValueToStack(Location dest, int64_t value) {
7566 DCHECK(dest.IsDoubleStackSlot());
7567 if (IsInt<32>(value)) {
7568 // Can move directly as an int32 constant.
7569 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()),
7570 Immediate(static_cast<int32_t>(value)));
7571 } else {
7572 Load64BitValue(CpuRegister(TMP), value);
7573 __ movq(Address(CpuRegister(RSP), dest.GetStackIndex()), CpuRegister(TMP));
7574 }
7575}
7576
Mark Mendell9c86b482015-09-18 13:36:07 -04007577/**
7578 * Class to handle late fixup of offsets into constant area.
7579 */
7580class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
7581 public:
7582 RIPFixup(CodeGeneratorX86_64& codegen, size_t offset)
7583 : codegen_(&codegen), offset_into_constant_area_(offset) {}
7584
7585 protected:
7586 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7587
7588 CodeGeneratorX86_64* codegen_;
7589
7590 private:
Roland Levillainbbc6e7e2018-08-24 16:58:47 +01007591 void Process(const MemoryRegion& region, int pos) override {
Mark Mendell9c86b482015-09-18 13:36:07 -04007592 // Patch the correct offset for the instruction. We use the address of the
7593 // 'next' instruction, which is 'pos' (patch the 4 bytes before).
7594 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
7595 int32_t relative_position = constant_offset - pos;
7596
7597 // Patch in the right value.
7598 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7599 }
7600
7601 // Location in constant area that the fixup refers to.
7602 size_t offset_into_constant_area_;
7603};
7604
7605/**
7606 t * Class to handle late fixup of offsets to a jump table that will be created in the
7607 * constant area.
7608 */
7609class JumpTableRIPFixup : public RIPFixup {
7610 public:
7611 JumpTableRIPFixup(CodeGeneratorX86_64& codegen, HPackedSwitch* switch_instr)
7612 : RIPFixup(codegen, -1), switch_instr_(switch_instr) {}
7613
7614 void CreateJumpTable() {
7615 X86_64Assembler* assembler = codegen_->GetAssembler();
7616
7617 // Ensure that the reference to the jump table has the correct offset.
7618 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7619 SetOffset(offset_in_constant_table);
7620
7621 // Compute the offset from the start of the function to this jump table.
7622 const int32_t current_table_offset = assembler->CodeSize() + offset_in_constant_table;
7623
7624 // Populate the jump table with the correct values for the jump table.
7625 int32_t num_entries = switch_instr_->GetNumEntries();
7626 HBasicBlock* block = switch_instr_->GetBlock();
7627 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7628 // The value that we want is the target offset - the position of the table.
7629 for (int32_t i = 0; i < num_entries; i++) {
7630 HBasicBlock* b = successors[i];
7631 Label* l = codegen_->GetLabelOf(b);
7632 DCHECK(l->IsBound());
7633 int32_t offset_to_block = l->Position() - current_table_offset;
7634 assembler->AppendInt32(offset_to_block);
7635 }
7636 }
7637
7638 private:
7639 const HPackedSwitch* switch_instr_;
7640};
7641
Mark Mendellf55c3e02015-03-26 21:07:46 -04007642void CodeGeneratorX86_64::Finalize(CodeAllocator* allocator) {
7643 // Generate the constant area if needed.
Mark Mendell39dcf552015-04-09 20:42:42 -04007644 X86_64Assembler* assembler = GetAssembler();
Mark Mendell9c86b482015-09-18 13:36:07 -04007645 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7646 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8 byte values.
Mark Mendell39dcf552015-04-09 20:42:42 -04007647 assembler->Align(4, 0);
7648 constant_area_start_ = assembler->CodeSize();
Mark Mendell9c86b482015-09-18 13:36:07 -04007649
7650 // Populate any jump tables.
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007651 for (JumpTableRIPFixup* jump_table : fixups_to_jump_tables_) {
Mark Mendell9c86b482015-09-18 13:36:07 -04007652 jump_table->CreateJumpTable();
7653 }
7654
7655 // And now add the constant area to the generated code.
Mark Mendell39dcf552015-04-09 20:42:42 -04007656 assembler->AddConstantArea();
Mark Mendellf55c3e02015-03-26 21:07:46 -04007657 }
7658
7659 // And finish up.
7660 CodeGenerator::Finalize(allocator);
7661}
7662
Mark Mendellf55c3e02015-03-26 21:07:46 -04007663Address CodeGeneratorX86_64::LiteralDoubleAddress(double v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007664 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddDouble(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007665 return Address::RIP(fixup);
7666}
7667
7668Address CodeGeneratorX86_64::LiteralFloatAddress(float v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007669 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddFloat(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007670 return Address::RIP(fixup);
7671}
7672
7673Address CodeGeneratorX86_64::LiteralInt32Address(int32_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007674 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt32(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007675 return Address::RIP(fixup);
7676}
7677
7678Address CodeGeneratorX86_64::LiteralInt64Address(int64_t v) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007679 AssemblerFixup* fixup = new (GetGraph()->GetAllocator()) RIPFixup(*this, __ AddInt64(v));
Mark Mendellf55c3e02015-03-26 21:07:46 -04007680 return Address::RIP(fixup);
7681}
7682
Andreas Gampe85b62f22015-09-09 13:15:38 -07007683// TODO: trg as memory.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007684void CodeGeneratorX86_64::MoveFromReturnRegister(Location trg, DataType::Type type) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07007685 if (!trg.IsValid()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007686 DCHECK_EQ(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007687 return;
7688 }
7689
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007690 DCHECK_NE(type, DataType::Type::kVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007691
7692 Location return_loc = InvokeDexCallingConventionVisitorX86_64().GetReturnLocation(type);
7693 if (trg.Equals(return_loc)) {
7694 return;
7695 }
7696
7697 // Let the parallel move resolver take care of all of this.
Vladimir Markoca6fff82017-10-03 14:49:14 +01007698 HParallelMove parallel_move(GetGraph()->GetAllocator());
Andreas Gampe85b62f22015-09-09 13:15:38 -07007699 parallel_move.AddMove(return_loc, trg, type, nullptr);
7700 GetMoveResolver()->EmitNativeCode(&parallel_move);
7701}
7702
Mark Mendell9c86b482015-09-18 13:36:07 -04007703Address CodeGeneratorX86_64::LiteralCaseTable(HPackedSwitch* switch_instr) {
7704 // Create a fixup to be used to create and address the jump table.
7705 JumpTableRIPFixup* table_fixup =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007706 new (GetGraph()->GetAllocator()) JumpTableRIPFixup(*this, switch_instr);
Mark Mendell9c86b482015-09-18 13:36:07 -04007707
7708 // We have to populate the jump tables.
7709 fixups_to_jump_tables_.push_back(table_fixup);
7710 return Address::RIP(table_fixup);
7711}
7712
Mark Mendellea5af682015-10-22 17:35:49 -04007713void CodeGeneratorX86_64::MoveInt64ToAddress(const Address& addr_low,
7714 const Address& addr_high,
7715 int64_t v,
7716 HInstruction* instruction) {
7717 if (IsInt<32>(v)) {
7718 int32_t v_32 = v;
7719 __ movq(addr_low, Immediate(v_32));
7720 MaybeRecordImplicitNullCheck(instruction);
7721 } else {
7722 // Didn't fit in a register. Do it in pieces.
7723 int32_t low_v = Low32Bits(v);
7724 int32_t high_v = High32Bits(v);
7725 __ movl(addr_low, Immediate(low_v));
7726 MaybeRecordImplicitNullCheck(instruction);
7727 __ movl(addr_high, Immediate(high_v));
7728 }
7729}
7730
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007731void CodeGeneratorX86_64::PatchJitRootUse(uint8_t* code,
7732 const uint8_t* roots_data,
7733 const PatchInfo<Label>& info,
7734 uint64_t index_in_table) const {
7735 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7736 uintptr_t address =
7737 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
Andreas Gampec55bb392018-09-21 00:02:02 +00007738 using unaligned_uint32_t __attribute__((__aligned__(1))) = uint32_t;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007739 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7740 dchecked_integral_cast<uint32_t>(address);
7741}
7742
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007743void CodeGeneratorX86_64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7744 for (const PatchInfo<Label>& info : jit_string_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007745 StringReference string_reference(info.target_dex_file, dex::StringIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007746 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007747 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007748 }
7749
7750 for (const PatchInfo<Label>& info : jit_class_patches_) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00007751 TypeReference type_reference(info.target_dex_file, dex::TypeIndex(info.offset_or_index));
Vladimir Marko174b2e22017-10-12 13:34:49 +01007752 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01007753 PatchJitRootUse(code, roots_data, info, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007754 }
7755}
7756
Shalini Salomi Bodapatib45a4352019-07-10 16:09:41 +05307757bool LocationsBuilderX86_64::CpuHasAvxFeatureFlag() {
7758 return codegen_->GetInstructionSetFeatures().HasAVX();
7759}
7760
7761bool LocationsBuilderX86_64::CpuHasAvx2FeatureFlag() {
7762 return codegen_->GetInstructionSetFeatures().HasAVX2();
7763}
7764
7765bool InstructionCodeGeneratorX86_64::CpuHasAvxFeatureFlag() {
7766 return codegen_->GetInstructionSetFeatures().HasAVX();
7767}
7768
7769bool InstructionCodeGeneratorX86_64::CpuHasAvx2FeatureFlag() {
7770 return codegen_->GetInstructionSetFeatures().HasAVX2();
7771}
7772
Roland Levillain4d027112015-07-01 15:41:14 +01007773#undef __
7774
Nicolas Geoffray9cf35522014-06-09 18:40:10 +01007775} // namespace x86_64
7776} // namespace art